Documentation
LinksResource
Access via client.links
Create & Crawl
create()
Create a new link (bookmark) with optional auto-crawl for Open Graph metadata
async def create(
    url: str,  # Must start with http:// or https://
    *,
    title: Optional[str] = None,  # Max 500 chars
    tags: Optional[list[str]] = None,  # Max 40 tags, each max 50 chars
    folder_id: Optional[str] = None,
    auto_crawl: bool = True,
) -> CreateLinkResult

Returns: CreateLinkResult
create_and_wait()
Create a link and wait for crawl to complete (convenience method)
async def create_and_wait(
    url: str,
    *,
    title: Optional[str] = None,
    tags: Optional[list[str]] = None,
    folder_id: Optional[str] = None,
    timeout: Optional[float] = None,  # Default: 60s
) -> LinkDetails

Returns: LinkDetails - Full details with completed crawl status and OG metadata
recrawl()
Recrawl a link to refresh its metadata (rate limited to once per hour)
async def recrawl(link_id: str) -> RecrawlLinkResult

Returns: RecrawlLinkResult
wait_for_crawl()
Poll until link crawl completes or fails
async def wait_for_crawl(
    link_id: str,
    *,
    timeout: Optional[float] = None,  # Default: 60s
    poll_interval: Optional[float] = None,  # Default: 1s
) -> LinkDetails

Returns: LinkDetails
Read
get()
Get detailed information about a link
async def get(link_id: str) -> LinkDetails

Returns: LinkDetails - Full link info with OG metadata, crawl status, extracted images
list()
List links with optional filtering and pagination
async def list(
    *,
    search: Optional[str] = None,
    tags: Optional[list[str]] = None,
    folder_id: Optional[str] = None,
    crawl_status: Optional[str] = None,
    date_from: Optional[datetime] = None,
    date_to: Optional[datetime] = None,
    limit: int = 20,  # 1-100
    offset: int = 0,
    sort_by: str = "created_at",  # "created_at" | "title"
    sort_order: str = "desc",  # "asc" | "desc"
) -> LinkList

Returns: LinkList
list_all()
Auto-paginating async iterator that yields all links
async def list_all(
    *,
    search: Optional[str] = None,
    tags: Optional[list[str]] = None,
    folder_id: Optional[str] = None,
    crawl_status: Optional[str] = None,
    date_from: Optional[datetime] = None,
    date_to: Optional[datetime] = None,
    sort_by: str = "created_at",
    sort_order: str = "desc",
    page_size: int = 50,
) -> AsyncIterator[LinkItem]

Yields: LinkItem objects one at a time, automatically paginating through all results
Update & Delete
update()
Update link metadata (title and/or tags)
async def update(
    link_id: str,
    *,
    title: Optional[str] = None,  # Max 500 chars
    tags: Optional[list[str]] = None,  # Max 40 tags, each max 50 chars
) -> LinkUpdateResult

Returns: LinkUpdateResult
batch_delete()
Delete multiple links in one operation (max 100, no duplicates)
async def batch_delete(link_ids: list[str]) -> BatchDeleteFilesResponse

Returns: BatchDeleteFilesResponse