Experimental browser for the Atmosphere
github.com/simonw/llm/i...
Apr 26, 2025, 1:25 PM
{ "uri": "at://did:plc:4z5i7njrld66ew36htufcwry/app.bsky.feed.post/3lnptm555cs2h", "cid": "bafyreiccvev7mvzpqh654mugu7btaj7qxovi2juohalpprruviizyvwghi", "value": { "text": "github.com/simonw/llm/i...", "$type": "app.bsky.feed.post", "embed": { "$type": "app.bsky.embed.external", "external": { "uri": "https://github.com/simonw/llm/issues/958", "thumb": { "$type": "blob", "ref": { "$link": "bafkreiajpflbej4yfdjaxstpy774wkddozqekep2mhrexxz2blo4ow5nuq" }, "mimeType": "image/jpeg", "size": 315632 }, "title": "Markdown serialization/deserialization of `llm` conversations · Issue #958 · simonw/llm", "description": "Right now, llm allows conversations to be exported with llm logs --cid X. What would take for llm to be able to read the exported conversations back? This issue is to figure out a way we could seri..." } }, "langs": [ "en" ], "reply": { "root": { "cid": "bafyreiae3y2fmpcr4cjkod6u4rooowqlyjctmocjl3lky2yolidmutekfi", "uri": "at://did:plc:4z5i7njrld66ew36htufcwry/app.bsky.feed.post/3lnn3b2umbs2c" }, "parent": { "cid": "bafyreih6oqbvsbc5k6zzo4p5vscnbrmnxuil5hnkkzfaxoxc5i6esyl27e", "uri": "at://did:plc:kft6lu4trxowqmter2b6vg6z/app.bsky.feed.post/3lnpsnhnxzc2c" } }, "facets": [ { "index": { "byteEnd": 26, "byteStart": 0 }, "features": [ { "uri": "https://github.com/simonw/llm/issues/958", "$type": "app.bsky.richtext.facet#link" } ] } ], "createdAt": "2025-04-26T13:25:58.559Z" } }