Skip to content

Files (V1)

Service for managing files attached to entities.

Source code in affinity/services/v1_only.py
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
class EntityFileService:
    """Service for managing files attached to entities.

    All endpoints used here belong to the Affinity V1 API (``v1=True``).
    Every operation targets exactly one entity: a person, a company
    (called "organization" by the V1 API), or an opportunity.
    """

    def __init__(self, client: HTTPClient):
        # Shared HTTP client used for every request in this service.
        self._client = client

    def _validate_exactly_one_target(
        self,
        *,
        person_id: PersonId | None,
        company_id: CompanyId | None,
        opportunity_id: OpportunityId | None,
    ) -> None:
        """Raise ``ValueError`` unless exactly one target id is provided."""
        targets = [person_id, company_id, opportunity_id]
        count = sum(1 for t in targets if t is not None)
        if count == 1:
            return
        if count == 0:
            raise ValueError("Exactly one of person_id, company_id, or opportunity_id is required")
        raise ValueError("Only one of person_id, company_id, or opportunity_id may be provided")

    def list(
        self,
        *,
        person_id: PersonId | None = None,
        company_id: CompanyId | None = None,
        opportunity_id: OpportunityId | None = None,
        page_size: int | None = None,
        page_token: str | None = None,
    ) -> PaginatedResponse[EntityFile]:
        """Get files attached to an entity (one page)."""
        self._validate_exactly_one_target(
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )
        params: dict[str, Any] = {}
        if person_id is not None:
            params["person_id"] = int(person_id)
        if company_id is not None:
            # V1 calls companies "organizations".
            params["organization_id"] = int(company_id)
        if opportunity_id is not None:
            params["opportunity_id"] = int(opportunity_id)
        if page_size is not None:
            params["page_size"] = page_size
        if page_token is not None:
            params["page_token"] = page_token

        data = self._client.get("/entity-files", params=params or None, v1=True)
        # The API has returned the list under several keys over time; accept
        # any of them, preferring the documented snake_case form.
        items = (
            data.get("entity_files")
            or data.get("entityFiles")
            or data.get("files")
            or data.get("data", [])
        )
        if not isinstance(items, list):
            items = []
        return PaginatedResponse[EntityFile](
            data=[EntityFile.model_validate(f) for f in items],
            next_page_token=data.get("next_page_token") or data.get("nextPageToken"),
        )

    def get(self, file_id: FileId) -> EntityFile:
        """Get file metadata."""
        data = self._client.get(f"/entity-files/{file_id}", v1=True)
        return EntityFile.model_validate(data)

    def download(
        self,
        file_id: FileId,
        *,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> bytes:
        """Download the full file content into memory."""
        return self._client.download_file(
            f"/entity-files/download/{file_id}",
            v1=True,
            timeout=timeout,
            deadline_seconds=deadline_seconds,
        )

    def get_download_url(
        self,
        file_id: FileId,
        *,
        timeout: httpx.Timeout | float | None = None,
    ) -> PresignedUrl:
        """
        Get a presigned download URL for a file without downloading its content.

        The returned URL is valid for approximately 60 seconds and can be
        fetched without authentication (it's self-authenticating via signature).

        Args:
            file_id: The entity file ID
            timeout: Optional request timeout

        Returns:
            PresignedUrl with the URL, file metadata, and expiration info

        Raises:
            AffinityError: If the API doesn't return a redirect URL
        """
        # Fetch file metadata first
        file_meta = self.get(file_id)

        url = self._client.get_redirect_url(
            f"/entity-files/download/{file_id}",
            v1=True,
            timeout=timeout,
        )
        if not url:
            raise AffinityError(
                f"Failed to get presigned URL for file {file_id}: no redirect returned"
            )

        # Parse X-Amz-Expires from the presigned URL to determine TTL
        # Default to 60 seconds if not found (Affinity's typical TTL)
        parsed = urlparse(url)
        qs = parse_qs(parsed.query)
        expires_in = 60  # default
        if "X-Amz-Expires" in qs:
            with contextlib.suppress(ValueError, IndexError):
                expires_in = int(qs["X-Amz-Expires"][0])

        now = datetime.now(timezone.utc)
        expires_at = now + timedelta(seconds=expires_in)

        return PresignedUrl(
            url=url,
            file_id=int(file_id),
            name=file_meta.name,
            size=file_meta.size,
            content_type=file_meta.content_type,
            expires_in=expires_in,
            expires_at=expires_at,
        )

    def download_stream(
        self,
        file_id: FileId,
        *,
        chunk_size: int = 65_536,
        on_progress: ProgressCallback | None = None,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> Iterator[bytes]:
        """Stream-download file content in chunks."""
        return self._client.stream_download(
            f"/entity-files/download/{file_id}",
            v1=True,
            chunk_size=chunk_size,
            on_progress=on_progress,
            timeout=timeout,
            deadline_seconds=deadline_seconds,
        )

    def download_stream_with_info(
        self,
        file_id: FileId,
        *,
        chunk_size: int = 65_536,
        on_progress: ProgressCallback | None = None,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> DownloadedFile:
        """
        Stream-download a file and return response metadata (headers/filename/size).

        Notes:
        - `filename` is derived from `Content-Disposition` when present.
        - If the server does not provide a filename, callers can fall back to
          `files.get(file_id).name`.
        """
        return self._client.stream_download_with_info(
            f"/entity-files/download/{file_id}",
            v1=True,
            chunk_size=chunk_size,
            on_progress=on_progress,
            timeout=timeout,
            deadline_seconds=deadline_seconds,
        )

    def download_to(
        self,
        file_id: FileId,
        path: str | Path,
        *,
        overwrite: bool = False,
        chunk_size: int = 65_536,
        on_progress: ProgressCallback | None = None,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> Path:
        """
        Download a file to disk.

        Args:
            file_id: The entity file id
            path: Destination path
            overwrite: If False, raises FileExistsError when path exists
            chunk_size: Bytes per chunk

        Returns:
            The destination path
        """
        target = Path(path)
        # Exclusive creation ("xb") fails atomically if the path exists,
        # avoiding the check-then-open race of `exists()` + open("wb").
        mode = "wb" if overwrite else "xb"
        try:
            f = target.open(mode)
        except FileExistsError:
            # Preserve the original error shape: message is just the path.
            raise FileExistsError(str(target)) from None

        try:
            with f:
                for chunk in self.download_stream(
                    file_id,
                    chunk_size=chunk_size,
                    on_progress=on_progress,
                    timeout=timeout,
                    deadline_seconds=deadline_seconds,
                ):
                    f.write(chunk)
        except Exception:
            # Clean up partial file on error; missing_ok guards against the
            # file having already disappeared.
            target.unlink(missing_ok=True)
            raise

        return target

    def upload(
        self,
        files: dict[str, Any],
        *,
        person_id: PersonId | None = None,
        company_id: CompanyId | None = None,
        opportunity_id: OpportunityId | None = None,
    ) -> bool:
        """
        Upload files to an entity.

        Args:
            files: Dict of filename to file-like object
            person_id: Person to attach to
            company_id: Company to attach to
            opportunity_id: Opportunity to attach to

        Returns:
            True if the API reported success
        """
        self._validate_exactly_one_target(
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )
        data: dict[str, Any] = {}
        # Explicit None checks (matching `list()`): plain truthiness would
        # silently drop a falsy-but-present id such as 0.
        if person_id is not None:
            data["person_id"] = int(person_id)
        if company_id is not None:
            data["organization_id"] = int(company_id)
        if opportunity_id is not None:
            data["opportunity_id"] = int(opportunity_id)

        result = self._client.upload_file(
            "/entity-files",
            files=files,
            data=data,
            v1=True,
        )
        if "success" in result:
            return bool(result.get("success"))
        # If the API returns something else on success (e.g., created object),
        # treat any 2xx JSON response as success (4xx/5xx raise earlier).
        return True

    def upload_path(
        self,
        path: str | Path,
        *,
        person_id: PersonId | None = None,
        company_id: CompanyId | None = None,
        opportunity_id: OpportunityId | None = None,
        filename: str | None = None,
        content_type: str | None = None,
        on_progress: ProgressCallback | None = None,
    ) -> bool:
        """
        Upload a file from disk.

        Notes:
        - Returns only a boolean because the API returns `{"success": true}` for uploads.
        - Progress reporting is best-effort for uploads (start/end only).
        """
        self._validate_exactly_one_target(
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )

        p = Path(path)
        upload_filename = filename or p.name
        guessed, _ = mimetypes.guess_type(upload_filename)
        final_content_type = content_type or guessed or "application/octet-stream"
        total = p.stat().st_size

        if on_progress:
            on_progress(0, total, phase="upload")

        with p.open("rb") as f:
            ok = self.upload(
                files={"file": (upload_filename, f, final_content_type)},
                person_id=person_id,
                company_id=company_id,
                opportunity_id=opportunity_id,
            )

        if on_progress:
            on_progress(total, total, phase="upload")

        return ok

    def upload_bytes(
        self,
        data: bytes,
        filename: str,
        *,
        person_id: PersonId | None = None,
        company_id: CompanyId | None = None,
        opportunity_id: OpportunityId | None = None,
        content_type: str | None = None,
        on_progress: ProgressCallback | None = None,
    ) -> bool:
        """
        Upload in-memory bytes as a file.

        Notes:
        - Returns only a boolean because the API returns `{"success": true}` for uploads.
        - Progress reporting is best-effort for uploads (start/end only).
        """
        self._validate_exactly_one_target(
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )

        guessed, _ = mimetypes.guess_type(filename)
        final_content_type = content_type or guessed or "application/octet-stream"
        total = len(data)

        if on_progress:
            on_progress(0, total, phase="upload")

        ok = self.upload(
            files={"file": (filename, data, final_content_type)},
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )

        if on_progress:
            on_progress(total, total, phase="upload")

        return ok

    def all(
        self,
        *,
        person_id: PersonId | None = None,
        company_id: CompanyId | None = None,
        opportunity_id: OpportunityId | None = None,
    ) -> Iterator[EntityFile]:
        """Iterate through all files for an entity with automatic pagination."""
        self._validate_exactly_one_target(
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )

        page_token: str | None = None
        while True:
            page = self.list(
                person_id=person_id,
                company_id=company_id,
                opportunity_id=opportunity_id,
                page_token=page_token,
            )
            yield from page.data
            if not page.has_next:
                break
            page_token = page.next_page_token

    def iter(
        self,
        *,
        person_id: PersonId | None = None,
        company_id: CompanyId | None = None,
        opportunity_id: OpportunityId | None = None,
    ) -> Iterator[EntityFile]:
        """Auto-paginate all files (alias for `all()`)."""
        return self.all(
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )

all(*, person_id: PersonId | None = None, company_id: CompanyId | None = None, opportunity_id: OpportunityId | None = None) -> Iterator[EntityFile]

Iterate through all files for an entity with automatic pagination.

Source code in affinity/services/v1_only.py
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
def all(
    self,
    *,
    person_id: PersonId | None = None,
    company_id: CompanyId | None = None,
    opportunity_id: OpportunityId | None = None,
) -> Iterator[EntityFile]:
    """Yield every file attached to an entity, following pagination."""
    self._validate_exactly_one_target(
        person_id=person_id,
        company_id=company_id,
        opportunity_id=opportunity_id,
    )

    # Fetch the first page, then keep following next_page_token.
    page = self.list(
        person_id=person_id,
        company_id=company_id,
        opportunity_id=opportunity_id,
        page_token=None,
    )
    while True:
        yield from page.data
        if not page.has_next:
            return
        page = self.list(
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
            page_token=page.next_page_token,
        )

download(file_id: FileId, *, timeout: httpx.Timeout | float | None = None, deadline_seconds: float | None = None) -> bytes

Download file content.

Source code in affinity/services/v1_only.py
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
def download(
    self,
    file_id: FileId,
    *,
    timeout: httpx.Timeout | float | None = None,
    deadline_seconds: float | None = None,
) -> bytes:
    """Fetch the full content of an entity file into memory as bytes."""
    endpoint = f"/entity-files/download/{file_id}"
    return self._client.download_file(
        endpoint,
        v1=True,
        timeout=timeout,
        deadline_seconds=deadline_seconds,
    )

download_stream(file_id: FileId, *, chunk_size: int = 65536, on_progress: ProgressCallback | None = None, timeout: httpx.Timeout | float | None = None, deadline_seconds: float | None = None) -> Iterator[bytes]

Stream-download file content in chunks.

Source code in affinity/services/v1_only.py
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
def download_stream(
    self,
    file_id: FileId,
    *,
    chunk_size: int = 65_536,
    on_progress: ProgressCallback | None = None,
    timeout: httpx.Timeout | float | None = None,
    deadline_seconds: float | None = None,
) -> Iterator[bytes]:
    """Yield the file's content incrementally in `chunk_size` pieces."""
    endpoint = f"/entity-files/download/{file_id}"
    return self._client.stream_download(
        endpoint,
        v1=True,
        chunk_size=chunk_size,
        on_progress=on_progress,
        timeout=timeout,
        deadline_seconds=deadline_seconds,
    )

download_stream_with_info(file_id: FileId, *, chunk_size: int = 65536, on_progress: ProgressCallback | None = None, timeout: httpx.Timeout | float | None = None, deadline_seconds: float | None = None) -> DownloadedFile

Stream-download a file and return response metadata (headers/filename/size).

Notes: - filename is derived from Content-Disposition when present. - If the server does not provide a filename, callers can fall back to files.get(file_id).name.

Source code in affinity/services/v1_only.py
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
def download_stream_with_info(
    self,
    file_id: FileId,
    *,
    chunk_size: int = 65_536,
    on_progress: ProgressCallback | None = None,
    timeout: httpx.Timeout | float | None = None,
    deadline_seconds: float | None = None,
) -> DownloadedFile:
    """
    Stream-download a file, also exposing response metadata.

    Notes:
    - `filename` is derived from `Content-Disposition` when present.
    - If the server does not provide a filename, callers can fall back to
      `files.get(file_id).name`.
    """
    endpoint = f"/entity-files/download/{file_id}"
    return self._client.stream_download_with_info(
        endpoint,
        v1=True,
        chunk_size=chunk_size,
        on_progress=on_progress,
        timeout=timeout,
        deadline_seconds=deadline_seconds,
    )

download_to(file_id: FileId, path: str | Path, *, overwrite: bool = False, chunk_size: int = 65536, on_progress: ProgressCallback | None = None, timeout: httpx.Timeout | float | None = None, deadline_seconds: float | None = None) -> Path

Download a file to disk.

Parameters:

Name Type Description Default
file_id FileId

The entity file id

required
path str | Path

Destination path

required
overwrite bool

If False, raises FileExistsError when path exists

False
chunk_size int

Bytes per chunk

65536

Returns:

Type Description
Path

The destination path

Source code in affinity/services/v1_only.py
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
def download_to(
    self,
    file_id: FileId,
    path: str | Path,
    *,
    overwrite: bool = False,
    chunk_size: int = 65_536,
    on_progress: ProgressCallback | None = None,
    timeout: httpx.Timeout | float | None = None,
    deadline_seconds: float | None = None,
) -> Path:
    """
    Download a file to disk.

    Args:
        file_id: The entity file id
        path: Destination path
        overwrite: If False, raises FileExistsError when path exists
        chunk_size: Bytes per chunk

    Returns:
        The destination path
    """
    target = Path(path)
    # Exclusive creation ("xb") fails atomically if the path exists,
    # avoiding the check-then-open race of `exists()` + open("wb").
    mode = "wb" if overwrite else "xb"
    try:
        f = target.open(mode)
    except FileExistsError:
        # Preserve the original error shape: message is just the path.
        raise FileExistsError(str(target)) from None

    try:
        with f:
            for chunk in self.download_stream(
                file_id,
                chunk_size=chunk_size,
                on_progress=on_progress,
                timeout=timeout,
                deadline_seconds=deadline_seconds,
            ):
                f.write(chunk)
    except Exception:
        # Clean up partial file on error; missing_ok guards against the
        # file having already disappeared.
        target.unlink(missing_ok=True)
        raise

    return target

get(file_id: FileId) -> EntityFile

Get file metadata.

Source code in affinity/services/v1_only.py
1337
1338
1339
1340
def get(self, file_id: FileId) -> EntityFile:
    """Fetch and validate metadata for a single entity file."""
    payload = self._client.get(f"/entity-files/{file_id}", v1=True)
    return EntityFile.model_validate(payload)

get_download_url(file_id: FileId, *, timeout: httpx.Timeout | float | None = None) -> PresignedUrl

Get a presigned download URL for a file without downloading its content.

The returned URL is valid for approximately 60 seconds and can be fetched without authentication (it's self-authenticating via signature).

Parameters:

Name Type Description Default
file_id FileId

The entity file ID

required
timeout Timeout | float | None

Optional request timeout

None

Returns:

Type Description
PresignedUrl

PresignedUrl with the URL, file metadata, and expiration info

Raises:

Type Description
AffinityError

If the API doesn't return a redirect URL

Source code in affinity/services/v1_only.py
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
def get_download_url(
    self,
    file_id: FileId,
    *,
    timeout: httpx.Timeout | float | None = None,
) -> PresignedUrl:
    """
    Get a presigned download URL for a file without downloading its content.

    The returned URL is valid for approximately 60 seconds and can be
    fetched without authentication (it's self-authenticating via signature).

    Args:
        file_id: The entity file ID
        timeout: Optional request timeout

    Returns:
        PresignedUrl with the URL, file metadata, and expiration info

    Raises:
        AffinityError: If the API doesn't return a redirect URL
    """
    # Metadata lookup happens before resolving the redirect.
    meta = self.get(file_id)

    url = self._client.get_redirect_url(
        f"/entity-files/download/{file_id}",
        v1=True,
        timeout=timeout,
    )
    if not url:
        raise AffinityError(
            f"Failed to get presigned URL for file {file_id}: no redirect returned"
        )

    # Derive the TTL from X-Amz-Expires in the presigned URL's query string,
    # defaulting to Affinity's typical 60-second window when absent/unparsable.
    ttl_seconds = 60
    amz_expires = parse_qs(urlparse(url).query).get("X-Amz-Expires")
    if amz_expires is not None:
        with contextlib.suppress(ValueError, IndexError):
            ttl_seconds = int(amz_expires[0])

    issued_at = datetime.now(timezone.utc)

    return PresignedUrl(
        url=url,
        file_id=int(file_id),
        name=meta.name,
        size=meta.size,
        content_type=meta.content_type,
        expires_in=ttl_seconds,
        expires_at=issued_at + timedelta(seconds=ttl_seconds),
    )

iter(*, person_id: PersonId | None = None, company_id: CompanyId | None = None, opportunity_id: OpportunityId | None = None) -> Iterator[EntityFile]

Auto-paginate all files (alias for all()).

Source code in affinity/services/v1_only.py
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
def iter(
    self,
    *,
    person_id: PersonId | None = None,
    company_id: CompanyId | None = None,
    opportunity_id: OpportunityId | None = None,
) -> Iterator[EntityFile]:
    """Auto-paginate all files (alias for `all()`)."""
    # Thin alias: delegate directly to `all()` with identical arguments.
    return self.all(person_id=person_id, company_id=company_id, opportunity_id=opportunity_id)

list(*, person_id: PersonId | None = None, company_id: CompanyId | None = None, opportunity_id: OpportunityId | None = None, page_size: int | None = None, page_token: str | None = None) -> PaginatedResponse[EntityFile]

Get files attached to an entity.

Source code in affinity/services/v1_only.py
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
def list(
    self,
    *,
    person_id: PersonId | None = None,
    company_id: CompanyId | None = None,
    opportunity_id: OpportunityId | None = None,
    page_size: int | None = None,
    page_token: str | None = None,
) -> PaginatedResponse[EntityFile]:
    """Return one page of files attached to an entity."""
    self._validate_exactly_one_target(
        person_id=person_id,
        company_id=company_id,
        opportunity_id=opportunity_id,
    )
    # Build the query; entity ids are coerced to plain ints. Note the V1
    # API names companies "organizations".
    query: dict[str, Any] = {}
    for key, value in (
        ("person_id", person_id),
        ("organization_id", company_id),
        ("opportunity_id", opportunity_id),
    ):
        if value is not None:
            query[key] = int(value)
    if page_size is not None:
        query["page_size"] = page_size
    if page_token is not None:
        query["page_token"] = page_token

    payload = self._client.get("/entity-files", params=query or None, v1=True)
    # Accept any of the key spellings the API has used for the item list.
    raw = (
        payload.get("entity_files")
        or payload.get("entityFiles")
        or payload.get("files")
        or payload.get("data", [])
    )
    records = raw if isinstance(raw, list) else []
    token = payload.get("next_page_token") or payload.get("nextPageToken")
    return PaginatedResponse[EntityFile](
        data=[EntityFile.model_validate(item) for item in records],
        next_page_token=token,
    )

upload(files: dict[str, Any], *, person_id: PersonId | None = None, company_id: CompanyId | None = None, opportunity_id: OpportunityId | None = None) -> bool

Upload files to an entity.

Parameters:

Name Type Description Default
files dict[str, Any]

Dict of filename to file-like object

required
person_id PersonId | None

Person to attach to

None
company_id CompanyId | None

Company to attach to

None
opportunity_id OpportunityId | None

Opportunity to attach to

None

Returns:

Type Description
bool

True if the upload succeeded.

Source code in affinity/services/v1_only.py
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
def upload(
    self,
    files: dict[str, Any],
    *,
    person_id: PersonId | None = None,
    company_id: CompanyId | None = None,
    opportunity_id: OpportunityId | None = None,
) -> bool:
    """
    Upload files to an entity.

    Args:
        files: Dict of filename to file-like object
        person_id: Person to attach to
        company_id: Company to attach to
        opportunity_id: Opportunity to attach to

    Returns:
        True if the API reported success
    """
    self._validate_exactly_one_target(
        person_id=person_id,
        company_id=company_id,
        opportunity_id=opportunity_id,
    )
    data: dict[str, Any] = {}
    # Explicit None checks (matching `list()`): plain truthiness would
    # silently drop a falsy-but-present id such as 0.
    if person_id is not None:
        data["person_id"] = int(person_id)
    if company_id is not None:
        data["organization_id"] = int(company_id)
    if opportunity_id is not None:
        data["opportunity_id"] = int(opportunity_id)

    result = self._client.upload_file(
        "/entity-files",
        files=files,
        data=data,
        v1=True,
    )
    if "success" in result:
        return bool(result.get("success"))
    # If the API returns something else on success (e.g., created object),
    # treat any 2xx JSON response as success (4xx/5xx raise earlier).
    return True

upload_bytes(data: bytes, filename: str, *, person_id: PersonId | None = None, company_id: CompanyId | None = None, opportunity_id: OpportunityId | None = None, content_type: str | None = None, on_progress: ProgressCallback | None = None) -> bool

Upload in-memory bytes as a file.

Notes: - Returns only a boolean because the API returns {"success": true} for uploads. - Progress reporting is best-effort for uploads (start/end only).

Source code in affinity/services/v1_only.py
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
def upload_bytes(
    self,
    data: bytes,
    filename: str,
    *,
    person_id: PersonId | None = None,
    company_id: CompanyId | None = None,
    opportunity_id: OpportunityId | None = None,
    content_type: str | None = None,
    on_progress: ProgressCallback | None = None,
) -> bool:
    """
    Upload in-memory bytes as a file.

    Notes:
    - Returns only a boolean because the API returns `{"success": true}` for uploads.
    - Progress reporting is best-effort for uploads (start/end only).
    """
    self._validate_exactly_one_target(
        person_id=person_id,
        company_id=company_id,
        opportunity_id=opportunity_id,
    )

    # Explicit content_type wins; otherwise guess from the filename, then
    # fall back to the generic binary type.
    mime = content_type or mimetypes.guess_type(filename)[0] or "application/octet-stream"
    size = len(data)

    if on_progress:
        on_progress(0, size, phase="upload")

    succeeded = self.upload(
        files={"file": (filename, data, mime)},
        person_id=person_id,
        company_id=company_id,
        opportunity_id=opportunity_id,
    )

    if on_progress:
        on_progress(size, size, phase="upload")

    return succeeded

upload_path(path: str | Path, *, person_id: PersonId | None = None, company_id: CompanyId | None = None, opportunity_id: OpportunityId | None = None, filename: str | None = None, content_type: str | None = None, on_progress: ProgressCallback | None = None) -> bool

Upload a file from disk.

Notes: - Returns only a boolean because the API returns {"success": true} for uploads. - Progress reporting is best-effort for uploads (start/end only).

Source code in affinity/services/v1_only.py
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
def upload_path(
    self,
    path: str | Path,
    *,
    person_id: PersonId | None = None,
    company_id: CompanyId | None = None,
    opportunity_id: OpportunityId | None = None,
    filename: str | None = None,
    content_type: str | None = None,
    on_progress: ProgressCallback | None = None,
) -> bool:
    """
    Upload a file from disk.

    Notes:
    - Returns only a boolean because the API returns `{"success": true}` for uploads.
    - Progress reporting is best-effort for uploads (start/end only).
    """
    self._validate_exactly_one_target(
        person_id=person_id,
        company_id=company_id,
        opportunity_id=opportunity_id,
    )

    source = Path(path)
    name = filename or source.name
    # Explicit content_type wins; otherwise guess from the name, then fall
    # back to the generic binary type.
    mime = content_type or mimetypes.guess_type(name)[0] or "application/octet-stream"
    size = source.stat().st_size

    if on_progress:
        on_progress(0, size, phase="upload")

    with source.open("rb") as handle:
        succeeded = self.upload(
            files={"file": (name, handle, mime)},
            person_id=person_id,
            company_id=company_id,
            opportunity_id=opportunity_id,
        )

    if on_progress:
        on_progress(size, size, phase="upload")

    return succeeded