add skill
This commit is contained in:
parent
f830b8bb08
commit
611bd23a7c
9
config/local-upload.yaml
Normal file
9
config/local-upload.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
# Default bucket (used when no bucket specified)
|
||||
default: local
|
||||
|
||||
# Bucket configurations
|
||||
buckets:
|
||||
local:
|
||||
mode: local
|
||||
endpoint: http://localhost:8001/public
|
||||
public_dir: public # relative to cwd, or use absolute path
|
||||
8
config/s3-upload-sparticle.yaml
Normal file
8
config/s3-upload-sparticle.yaml
Normal file
@ -0,0 +1,8 @@
|
||||
default: local
|
||||
|
||||
# Bucket configurations
|
||||
buckets:
|
||||
local:
|
||||
mode: local
|
||||
endpoint: http://localhost:8001/public
|
||||
public_dir: public # relative to cwd, or use absolute path
|
||||
@ -34,6 +34,7 @@ services:
|
||||
- DEFAULT_THINKING_ENABLE=true
|
||||
# PostgreSQL 配置
|
||||
- CHECKPOINT_DB_URL=postgresql://postgres:E5ACJo6zJub4QS@postgres:5432/agent_db
|
||||
- R2_UPLOAD_CONFIG=/app/config/local-upload.yaml
|
||||
volumes:
|
||||
# 挂载项目数据目录
|
||||
- ./projects:/app/projects
|
||||
|
||||
@ -13,6 +13,7 @@ services:
|
||||
- BACKEND_HOST=http://api-dev.gbase.ai
|
||||
- MAX_CONTEXT_TOKENS=262144
|
||||
- DEFAULT_THINKING_ENABLE=true
|
||||
- R2_UPLOAD_CONFIG=/app/config/s3-upload-sparticle.yaml
|
||||
volumes:
|
||||
# 挂载项目数据目录
|
||||
- ./projects:/app/projects
|
||||
|
||||
211
skills/imap-smtp-email/SKILL.md
Normal file
211
skills/imap-smtp-email/SKILL.md
Normal file
@ -0,0 +1,211 @@
|
||||
---
|
||||
name: imap-smtp-email
|
||||
description: Read and send email via IMAP/SMTP. Check for new/unread messages, fetch content, search mailboxes, mark as read/unread, and send emails with attachments. Works with any IMAP/SMTP server including Gmail, Outlook, 163.com, vip.163.com, 126.com, vip.126.com, 188.com, and vip.188.com.
|
||||
metadata:
|
||||
openclaw:
|
||||
emoji: "📧"
|
||||
requires:
|
||||
env:
|
||||
- IMAP_HOST
|
||||
- IMAP_USER
|
||||
- IMAP_PASS
|
||||
- SMTP_HOST
|
||||
- SMTP_USER
|
||||
- SMTP_PASS
|
||||
bins:
|
||||
- node
|
||||
- npm
|
||||
primaryEnv: SMTP_PASS
|
||||
---
|
||||
|
||||
# IMAP/SMTP Email Tool
|
||||
|
||||
Read, search, and manage email via IMAP protocol. Send email via SMTP. Supports Gmail, Outlook, 163.com, vip.163.com, 126.com, vip.126.com, 188.com, vip.188.com, and any standard IMAP/SMTP server.
|
||||
|
||||
## Configuration
|
||||
|
||||
Create `.env` in the skill folder or set environment variables:
|
||||
|
||||
```bash
|
||||
# IMAP Configuration (receiving email)
|
||||
IMAP_HOST=imap.gmail.com # Server hostname
|
||||
IMAP_PORT=993 # Server port
|
||||
IMAP_USER=your@email.com
|
||||
IMAP_PASS=your_password
|
||||
IMAP_TLS=true # Use TLS/SSL connection
|
||||
IMAP_REJECT_UNAUTHORIZED=true # Set to false for self-signed certs
|
||||
IMAP_MAILBOX=INBOX # Default mailbox
|
||||
|
||||
# SMTP Configuration (sending email)
|
||||
SMTP_HOST=smtp.gmail.com # SMTP server hostname
|
||||
SMTP_PORT=587 # SMTP port (587 for STARTTLS, 465 for SSL)
|
||||
SMTP_SECURE=false # true for SSL (465), false for STARTTLS (587)
|
||||
SMTP_USER=your@gmail.com # Your email address
|
||||
SMTP_PASS=your_password # Your password or app password
|
||||
SMTP_FROM=your@gmail.com # Default sender email (optional)
|
||||
SMTP_REJECT_UNAUTHORIZED=true # Set to false for self-signed certs
|
||||
```
|
||||
|
||||
## Common Email Servers
|
||||
|
||||
| Provider | IMAP Host | IMAP Port | SMTP Host | SMTP Port |
|
||||
|----------|-----------|-----------|-----------|-----------|
|
||||
| 163.com | imap.163.com | 993 | smtp.163.com | 465 |
|
||||
| vip.163.com | imap.vip.163.com | 993 | smtp.vip.163.com | 465 |
|
||||
| 126.com | imap.126.com | 993 | smtp.126.com | 465 |
|
||||
| vip.126.com | imap.vip.126.com | 993 | smtp.vip.126.com | 465 |
|
||||
| 188.com | imap.188.com | 993 | smtp.188.com | 465 |
|
||||
| vip.188.com | imap.vip.188.com | 993 | smtp.vip.188.com | 465 |
|
||||
| yeah.net | imap.yeah.net | 993 | smtp.yeah.net | 465 |
|
||||
| Gmail | imap.gmail.com | 993 | smtp.gmail.com | 587 |
|
||||
| Outlook | outlook.office365.com | 993 | smtp.office365.com | 587 |
|
||||
| QQ Mail | imap.qq.com | 993 | smtp.qq.com | 587 |
|
||||
|
||||
**Important for Gmail:**
|
||||
- Gmail does **not** accept your regular account password
|
||||
- You must generate an **App Password**: https://myaccount.google.com/apppasswords
|
||||
- Use the generated 16-character App Password as `IMAP_PASS` / `SMTP_PASS`
|
||||
- Requires Google Account with 2-Step Verification enabled
|
||||
|
||||
**Important for 163.com:**
|
||||
- Use **authorization code** (授权码), not account password
|
||||
- Enable IMAP/SMTP in web settings first
|
||||
|
||||
## IMAP Commands (Receiving Email)
|
||||
|
||||
### check
|
||||
Check for new/unread emails.
|
||||
|
||||
```bash
|
||||
node scripts/imap.js check [--limit 10] [--mailbox INBOX] [--recent 2h]
|
||||
```
|
||||
|
||||
Options:
|
||||
- `--limit <n>`: Max results (default: 10)
|
||||
- `--mailbox <name>`: Mailbox to check (default: INBOX)
|
||||
- `--recent <time>`: Only show emails from last X time (e.g., 30m, 2h, 7d)
|
||||
|
||||
### fetch
|
||||
Fetch full email content by UID.
|
||||
|
||||
```bash
|
||||
node scripts/imap.js fetch <uid> [--mailbox INBOX]
|
||||
```
|
||||
|
||||
### download
|
||||
Download all attachments from an email, or a specific attachment.
|
||||
|
||||
```bash
|
||||
node scripts/imap.js download <uid> [--mailbox INBOX] [--dir <path>] [--file <filename>]
|
||||
```
|
||||
|
||||
Options:
|
||||
- `--mailbox <name>`: Mailbox (default: INBOX)
|
||||
- `--dir <path>`: Output directory (default: current directory)
|
||||
- `--file <filename>`: Download only the specified attachment (default: download all)
|
||||
|
||||
### search
|
||||
Search emails with filters.
|
||||
|
||||
```bash
|
||||
node scripts/imap.js search [options]
|
||||
|
||||
Options:
|
||||
--unseen Only unread messages
|
||||
--seen Only read messages
|
||||
--from <email> From address contains
|
||||
--subject <text> Subject contains
|
||||
--recent <time> From last X time (e.g., 30m, 2h, 7d)
|
||||
--since <date> After date (YYYY-MM-DD)
|
||||
--before <date> Before date (YYYY-MM-DD)
|
||||
--limit <n> Max results (default: 20)
|
||||
--mailbox <name> Mailbox to search (default: INBOX)
|
||||
```
|
||||
|
||||
### mark-read / mark-unread
|
||||
Mark message(s) as read or unread.
|
||||
|
||||
```bash
|
||||
node scripts/imap.js mark-read <uid> [uid2 uid3...]
|
||||
node scripts/imap.js mark-unread <uid> [uid2 uid3...]
|
||||
```
|
||||
|
||||
### list-mailboxes
|
||||
List all available mailboxes/folders.
|
||||
|
||||
```bash
|
||||
node scripts/imap.js list-mailboxes
|
||||
```
|
||||
|
||||
## SMTP Commands (Sending Email)
|
||||
|
||||
### send
|
||||
Send email via SMTP.
|
||||
|
||||
```bash
|
||||
node scripts/smtp.js send --to <email> --subject <text> [options]
|
||||
```
|
||||
|
||||
**Required:**
|
||||
- `--to <email>`: Recipient (comma-separated for multiple)
|
||||
- `--subject <text>`: Email subject, or `--subject-file <file>`
|
||||
|
||||
**Optional:**
|
||||
- `--body <text>`: Plain text body
|
||||
- `--html`: Send body as HTML
|
||||
- `--body-file <file>`: Read body from file
|
||||
- `--html-file <file>`: Read HTML from file
|
||||
- `--cc <email>`: CC recipients
|
||||
- `--bcc <email>`: BCC recipients
|
||||
- `--attach <file>`: Attachments (comma-separated)
|
||||
- `--from <email>`: Override default sender
|
||||
|
||||
**Examples:**
|
||||
```bash
|
||||
# Simple text email
|
||||
node scripts/smtp.js send --to recipient@example.com --subject "Hello" --body "World"
|
||||
|
||||
# HTML email
|
||||
node scripts/smtp.js send --to recipient@example.com --subject "Newsletter" --html --body "<h1>Welcome</h1>"
|
||||
|
||||
# Email with attachment
|
||||
node scripts/smtp.js send --to recipient@example.com --subject "Report" --body "Please find attached" --attach report.pdf
|
||||
|
||||
# Multiple recipients
|
||||
node scripts/smtp.js send --to "a@example.com,b@example.com" --cc "c@example.com" --subject "Update" --body "Team update"
|
||||
```
|
||||
|
||||
### test
|
||||
Test SMTP connection by sending a test email to yourself.
|
||||
|
||||
```bash
|
||||
node scripts/smtp.js test
|
||||
```
|
||||
|
||||
## Dependencies
|
||||
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
## Security Notes
|
||||
|
||||
- Store credentials in `.env` (add to `.gitignore`)
|
||||
- **Gmail**: regular password is rejected — generate an App Password at https://myaccount.google.com/apppasswords
|
||||
- For 163.com: use authorization code (授权码), not account password
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Connection timeout:**
|
||||
- Verify server is running and accessible
|
||||
- Check host/port configuration
|
||||
|
||||
**Authentication failed:**
|
||||
- Verify username (usually full email address)
|
||||
- Check password is correct
|
||||
- For 163.com: use authorization code, not account password
|
||||
- For Gmail: regular password won't work — generate an App Password at https://myaccount.google.com/apppasswords
|
||||
|
||||
**TLS/SSL errors:**
|
||||
- Match `IMAP_TLS`/`SMTP_SECURE` setting to server requirements
|
||||
- For self-signed certs: set `IMAP_REJECT_UNAUTHORIZED=false` or `SMTP_REJECT_UNAUTHORIZED=false`
|
||||
588
skills/imap-smtp-email/scripts/imap.js
Normal file
588
skills/imap-smtp-email/scripts/imap.js
Normal file
@ -0,0 +1,588 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* IMAP Email CLI
|
||||
* Works with any standard IMAP server (Gmail, ProtonMail Bridge, Fastmail, etc.)
|
||||
* Supports IMAP ID extension (RFC 2971) for 163.com and other servers
|
||||
*/
|
||||
|
||||
const Imap = require('imap');
|
||||
const { simpleParser } = require('mailparser');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
require('dotenv').config({ path: path.resolve(__dirname, '../.env') });
|
||||
|
||||
function validateWritePath(dirPath) {
|
||||
const allowedDirsStr = process.env.ALLOWED_WRITE_DIRS;
|
||||
if (!allowedDirsStr) {
|
||||
throw new Error('ALLOWED_WRITE_DIRS not set in .env. Attachment download is disabled.');
|
||||
}
|
||||
|
||||
const resolved = path.resolve(dirPath.replace(/^~/, os.homedir()));
|
||||
|
||||
const allowedDirs = allowedDirsStr.split(',').map(d =>
|
||||
path.resolve(d.trim().replace(/^~/, os.homedir()))
|
||||
);
|
||||
|
||||
const allowed = allowedDirs.some(dir =>
|
||||
resolved === dir || resolved.startsWith(dir + path.sep)
|
||||
);
|
||||
|
||||
if (!allowed) {
|
||||
throw new Error(`Access denied: '${dirPath}' is outside allowed write directories`);
|
||||
}
|
||||
|
||||
return resolved;
|
||||
}
|
||||
|
||||
function sanitizeFilename(filename) {
|
||||
return path.basename(filename).replace(/\.\./g, '').replace(/^[./\\]/, '') || 'attachment';
|
||||
}
|
||||
|
||||
// IMAP ID information for 163.com compatibility
|
||||
const IMAP_ID = {
|
||||
name: 'openclaw',
|
||||
version: '0.0.1',
|
||||
vendor: 'netease',
|
||||
'support-email': 'kefu@188.com'
|
||||
};
|
||||
|
||||
const DEFAULT_MAILBOX = process.env.IMAP_MAILBOX || 'INBOX';
|
||||
|
||||
// Parse command-line arguments into { command, options, positional }.
// The first argument is the command; `--key value` pairs become
// options[key] = value; a bare `--flag` (last arg, or followed by another
// `--` option) becomes options[flag] = true.
// Fix: the original assigned `options[key] = value || true` BEFORE checking
// whether `value` was another flag, so `--unseen --limit 5` stored the
// string '--limit' under options.unseen instead of `true`.
function parseArgs() {
  const args = process.argv.slice(2);
  const command = args[0];
  const options = {};
  const positional = [];

  for (let i = 1; i < args.length; i++) {
    const arg = args[i];
    if (arg.startsWith('--')) {
      const key = arg.slice(2);
      const next = args[i + 1];
      if (next === undefined || next.startsWith('--')) {
        options[key] = true; // bare boolean flag
      } else {
        options[key] = next; // flag with a value; consume it
        i++;
      }
    } else {
      positional.push(arg);
    }
  }

  return { command, options, positional };
}
|
||||
|
||||
// Build the node-imap constructor options from IMAP_* environment variables.
// Defaults: host 127.0.0.1, port 1143, TLS off; certificate verification is
// on unless IMAP_REJECT_UNAUTHORIZED=false.
function createImapConfig() {
  const env = process.env;
  const port = parseInt(env.IMAP_PORT) || 1143;
  const rejectUnauthorized = env.IMAP_REJECT_UNAUTHORIZED !== 'false';

  return {
    user: env.IMAP_USER,
    password: env.IMAP_PASS,
    host: env.IMAP_HOST || '127.0.0.1',
    port,
    tls: env.IMAP_TLS === 'true',
    tlsOptions: { rejectUnauthorized },
    connTimeout: 10000,
    authTimeout: 10000,
  };
}
|
||||
|
||||
// Connect to IMAP server with ID support
//
// Resolves with a ready, authenticated `Imap` connection. After 'ready',
// sends the RFC 2971 ID command (required by 163.com-family servers) when
// the client library exposes `imap.id`; an ID failure is non-fatal.
// Rejects when the connection or login fails; throws synchronously when
// IMAP_USER / IMAP_PASS are missing. Callers are responsible for imap.end().
async function connect() {
  const config = createImapConfig();

  if (!config.user || !config.password) {
    throw new Error('Missing IMAP_USER or IMAP_PASS environment variables');
  }

  return new Promise((resolve, reject) => {
    const imap = new Imap(config);

    imap.once('ready', () => {
      // Send IMAP ID command for 163.com compatibility
      if (typeof imap.id === 'function') {
        imap.id(IMAP_ID, (err) => {
          if (err) {
            // Non-fatal: proceed with the authenticated session anyway.
            console.warn('Warning: IMAP ID command failed:', err.message);
          }
          resolve(imap);
        });
      } else {
        // ID not supported, continue without it
        resolve(imap);
      }
    });

    imap.once('error', (err) => {
      reject(new Error(`IMAP connection failed: ${err.message}`));
    });

    imap.connect();
  });
}
|
||||
|
||||
// Promisified wrapper around imap.openBox(); resolves with the mailbox
// object, rejects on error. `readOnly` defaults to read-write.
function openBox(imap, mailbox, readOnly = false) {
  return new Promise((resolve, reject) => {
    imap.openBox(mailbox, readOnly, (err, box) => (err ? reject(err) : resolve(box)));
  });
}
|
||||
|
||||
// Search for messages matching `criteria` and fetch them with `fetchOptions`.
// Resolves with one entry per message: { which, body, attributes } (only the
// first fetched body part is kept — callers fetch bodies: [''], i.e. the
// whole raw message). Resolves [] when nothing matches.
//
// Fix: the original copied `attrs` onto `parts` inside the 'attributes'
// handler, but a body stream may not have emitted 'end' yet, so `parts`
// could still be empty and the attributes were silently lost (later
// `item.attributes.uid` would then throw). Attributes are now captured in a
// local and attached in the message's 'end' handler, which fires after all
// body streams have ended.
function searchMessages(imap, criteria, fetchOptions) {
  return new Promise((resolve, reject) => {
    imap.search(criteria, (err, results) => {
      if (err) {
        reject(err);
        return;
      }

      if (!results || results.length === 0) {
        resolve([]);
        return;
      }

      const fetch = imap.fetch(results, fetchOptions);
      const messages = [];

      fetch.on('message', (msg) => {
        const parts = [];
        let attributes = null;

        msg.on('body', (stream, info) => {
          let buffer = '';

          stream.on('data', (chunk) => {
            buffer += chunk.toString('utf8');
          });

          stream.once('end', () => {
            parts.push({ which: info.which, body: buffer });
          });
        });

        msg.once('attributes', (attrs) => {
          attributes = attrs;
        });

        msg.once('end', () => {
          // 'end' fires after all body streams finish; safe to attach now.
          parts.forEach((part) => {
            part.attributes = attributes;
          });
          if (parts.length > 0) {
            messages.push(parts[0]);
          }
        });
      });

      fetch.once('error', (err) => {
        reject(err);
      });

      fetch.once('end', () => {
        resolve(messages);
      });
    });
  });
}
|
||||
|
||||
// Parse a raw RFC 822 message string with mailparser's simpleParser and
// project it onto a plain summary object. `includeAttachments` controls
// whether attachment buffers are kept in memory (true) or dropped,
// retaining only their metadata (false, the default).
async function parseEmail(bodyStr, includeAttachments = false) {
  const mail = await simpleParser(bodyStr);

  // Prefer the plain-text body for the preview; otherwise strip tags from
  // the first 200 characters of HTML.
  let snippet = '';
  if (mail.text) {
    snippet = mail.text.slice(0, 200);
  } else if (mail.html) {
    snippet = mail.html.slice(0, 200).replace(/<[^>]*>/g, '');
  }

  const attachments = mail.attachments?.map((a) => ({
    filename: a.filename,
    contentType: a.contentType,
    size: a.size,
    content: includeAttachments ? a.content : undefined,
    cid: a.cid,
  }));

  return {
    from: mail.from?.text || 'Unknown',
    to: mail.to?.text,
    subject: mail.subject || '(no subject)',
    date: mail.date,
    text: mail.text,
    html: mail.html,
    snippet,
    attachments,
  };
}
|
||||
|
||||
// Check for new/unread emails
//
// mailbox    – mailbox to open (default IMAP_MAILBOX / 'INBOX')
// limit      – maximum number of messages returned, newest first
// recentTime – optional relative window like '30m' / '2h' / '7d'
// unreadOnly – when truthy, restrict the search to UNSEEN messages
//
// Returns an array of parsed summaries { uid, from, to, subject, date,
// text, html, snippet, attachments, flags }. Messages are never marked
// seen; the IMAP connection is always closed via the finally block.
async function checkEmails(mailbox = DEFAULT_MAILBOX, limit = 10, recentTime = null, unreadOnly = false) {
  const imap = await connect();

  try {
    await openBox(imap, mailbox);

    // Build search criteria
    const searchCriteria = unreadOnly ? ['UNSEEN'] : ['ALL'];

    if (recentTime) {
      const sinceDate = parseRelativeTime(recentTime);
      searchCriteria.push(['SINCE', sinceDate]);
    }

    // Fetch messages sorted by date (newest first)
    const fetchOptions = {
      bodies: [''], // the whole raw message, parsed below
      markSeen: false, // read-only: do not flip the \Seen flag
    };

    const messages = await searchMessages(imap, searchCriteria, fetchOptions);

    // Sort by date (newest first) - parse from message attributes
    // NOTE(review): this relies on searchMessages having attached
    // `attributes` to every entry before resolving — confirm against
    // node-imap's body/attributes event ordering.
    const sortedMessages = messages.sort((a, b) => {
      const dateA = a.attributes.date ? new Date(a.attributes.date) : new Date(0);
      const dateB = b.attributes.date ? new Date(b.attributes.date) : new Date(0);
      return dateB - dateA;
    }).slice(0, limit);

    const results = [];

    for (const item of sortedMessages) {
      const bodyStr = item.body;
      const parsed = await parseEmail(bodyStr);

      results.push({
        uid: item.attributes.uid,
        ...parsed,
        flags: item.attributes.flags,
      });
    }

    return results;
  } finally {
    imap.end();
  }
}
|
||||
|
||||
// Fetch and parse one message by UID from the given mailbox.
// Returns { uid, ...parsed summary, flags }; throws when the UID does not
// exist. The message is not marked seen, and the connection is always
// closed.
async function fetchEmail(uid, mailbox = DEFAULT_MAILBOX) {
  const imap = await connect();

  try {
    await openBox(imap, mailbox);

    const found = await searchMessages(imap, [['UID', uid]], {
      bodies: [''],
      markSeen: false,
    });

    if (found.length === 0) {
      throw new Error(`Message UID ${uid} not found`);
    }

    const [first] = found;
    const parsed = await parseEmail(first.body);

    return {
      uid: first.attributes.uid,
      ...parsed,
      flags: first.attributes.flags,
    };
  } finally {
    imap.end();
  }
}
|
||||
|
||||
// Download attachments from email
//
// uid              – message UID to fetch
// mailbox          – mailbox containing the message
// outputDir        – destination directory, validated against ALLOWED_WRITE_DIRS
// specificFilename – when set, only the attachment with this exact name is saved
//
// Returns { uid, downloaded: [{ filename, path, size }], message }.
// Never throws for "no attachments" / "name not found" — those come back as
// an empty `downloaded` list with an explanatory message. Throws on unknown
// UID or when outputDir is outside the allowed write directories.
async function downloadAttachments(uid, mailbox = DEFAULT_MAILBOX, outputDir = '.', specificFilename = null) {
  const imap = await connect();

  try {
    await openBox(imap, mailbox);

    const searchCriteria = [['UID', uid]];
    const fetchOptions = {
      bodies: [''],
      markSeen: false,
    };

    const messages = await searchMessages(imap, searchCriteria, fetchOptions);

    if (messages.length === 0) {
      throw new Error(`Message UID ${uid} not found`);
    }

    const item = messages[0];
    // `true` → parseEmail keeps the attachment buffers in memory
    const parsed = await parseEmail(item.body, true);

    if (!parsed.attachments || parsed.attachments.length === 0) {
      return {
        uid,
        downloaded: [],
        message: 'No attachments found',
      };
    }

    // Create output directory if it doesn't exist
    const resolvedDir = validateWritePath(outputDir);
    if (!fs.existsSync(resolvedDir)) {
      fs.mkdirSync(resolvedDir, { recursive: true });
    }

    const downloaded = [];

    for (const attachment of parsed.attachments) {
      // If specificFilename is provided, only download matching attachment
      if (specificFilename && attachment.filename !== specificFilename) {
        continue;
      }
      if (attachment.content) {
        // NOTE(review): attachment.filename can be undefined for inline
        // parts — sanitizeFilename would then hit path.basename(undefined);
        // confirm and guard if needed.
        const filePath = path.join(resolvedDir, sanitizeFilename(attachment.filename));
        fs.writeFileSync(filePath, attachment.content);
        downloaded.push({
          filename: attachment.filename,
          path: filePath,
          size: attachment.size,
        });
      }
    }

    // If specific file was requested but not found
    if (specificFilename && downloaded.length === 0) {
      const availableFiles = parsed.attachments.map(a => a.filename).join(', ');
      return {
        uid,
        downloaded: [],
        message: `File "${specificFilename}" not found. Available attachments: ${availableFiles}`,
      };
    }

    return {
      uid,
      downloaded,
      message: `Downloaded ${downloaded.length} attachment(s)`,
    };
  } finally {
    imap.end();
  }
}
|
||||
|
||||
// Convert a relative-time token ('30m', '2h', '7d') into the Date that far
// in the past from now. Throws on any other format.
function parseRelativeTime(timeStr) {
  const match = /^(\d+)(m|h|d)$/.exec(timeStr);
  if (!match) {
    throw new Error('Invalid time format. Use: 30m, 2h, 7d');
  }

  // Milliseconds per unit: minutes, hours, days.
  const MS_PER_UNIT = { m: 60 * 1000, h: 60 * 60 * 1000, d: 24 * 60 * 60 * 1000 };
  const amount = parseInt(match[1], 10);

  return new Date(Date.now() - amount * MS_PER_UNIT[match[2]]);
}
|
||||
|
||||
// Search emails with criteria
//
// options (from parseArgs): { unseen?, seen?, from?, subject?, recent?,
//   since?, before?, limit? (default 20), mailbox? }.
// A relative `recent` window takes precedence over absolute since/before.
// With no filters at all, searches ALL. Returns parsed summaries, newest
// first, without marking anything seen; always closes the connection.
async function searchEmails(options) {
  const imap = await connect();

  try {
    const mailbox = options.mailbox || DEFAULT_MAILBOX;
    await openBox(imap, mailbox);

    const criteria = [];

    if (options.unseen) criteria.push('UNSEEN');
    if (options.seen) criteria.push('SEEN');
    if (options.from) criteria.push(['FROM', options.from]);
    if (options.subject) criteria.push(['SUBJECT', options.subject]);

    // Handle relative time (--recent 2h)
    if (options.recent) {
      const sinceDate = parseRelativeTime(options.recent);
      criteria.push(['SINCE', sinceDate]);
    } else {
      // Handle absolute dates
      if (options.since) criteria.push(['SINCE', options.since]);
      if (options.before) criteria.push(['BEFORE', options.before]);
    }

    // Default to all if no criteria
    if (criteria.length === 0) criteria.push('ALL');

    const fetchOptions = {
      bodies: [''], // fetch the whole raw message
      markSeen: false, // read-only search
    };

    const messages = await searchMessages(imap, criteria, fetchOptions);
    const limit = parseInt(options.limit) || 20;
    const results = [];

    // Sort by date (newest first)
    const sortedMessages = messages.sort((a, b) => {
      const dateA = a.attributes.date ? new Date(a.attributes.date) : new Date(0);
      const dateB = b.attributes.date ? new Date(b.attributes.date) : new Date(0);
      return dateB - dateA;
    }).slice(0, limit);

    for (const item of sortedMessages) {
      const parsed = await parseEmail(item.body);
      results.push({
        uid: item.attributes.uid,
        ...parsed,
        flags: item.attributes.flags,
      });
    }

    return results;
  } finally {
    imap.end();
  }
}
|
||||
|
||||
// Set the \Seen flag on one or more messages (by UID) in `mailbox`.
// Resolves with { success, uids, action }; the connection is closed in the
// finally block.
async function markAsRead(uids, mailbox = DEFAULT_MAILBOX) {
  const imap = await connect();

  try {
    await openBox(imap, mailbox);

    return new Promise((resolve, reject) => {
      imap.addFlags(uids, '\\Seen', (err) =>
        err ? reject(err) : resolve({ success: true, uids, action: 'marked as read' })
      );
    });
  } finally {
    imap.end();
  }
}
|
||||
|
||||
// Clear the \Seen flag on one or more messages (by UID) in `mailbox`.
// Resolves with { success, uids, action }; the connection is closed in the
// finally block.
async function markAsUnread(uids, mailbox = DEFAULT_MAILBOX) {
  const imap = await connect();

  try {
    await openBox(imap, mailbox);

    return new Promise((resolve, reject) => {
      imap.delFlags(uids, '\\Seen', (err) =>
        err ? reject(err) : resolve({ success: true, uids, action: 'marked as unread' })
      );
    });
  } finally {
    imap.end();
  }
}
|
||||
|
||||
// Retrieve the server's mailbox hierarchy and flatten it into a list of
// { name, delimiter, attributes } entries via formatMailboxTree.
async function listMailboxes() {
  const imap = await connect();

  try {
    return new Promise((resolve, reject) => {
      imap.getBoxes((err, boxes) =>
        err ? reject(err) : resolve(formatMailboxTree(boxes))
      );
    });
  } finally {
    imap.end();
  }
}
|
||||
|
||||
// Flatten the nested node-imap mailbox map into a list of
// { name, delimiter, attributes } entries, joining child names to their
// parent with the parent's delimiter (e.g. "INBOX/Sub").
function formatMailboxTree(boxes, prefix = '') {
  return Object.entries(boxes).flatMap(([name, info]) => {
    const fullName = prefix ? `${prefix}${info.delimiter}${name}` : name;
    const entry = {
      name: fullName,
      delimiter: info.delimiter,
      attributes: info.attribs,
    };
    const children = info.children ? formatMailboxTree(info.children, fullName) : [];
    return [entry, ...children];
  });
}
|
||||
|
||||
// Main CLI handler
// Dispatches the first CLI argument to the matching IMAP operation and
// prints the result as pretty JSON. Exits 1 on unknown commands or any
// operational error.
// Fix: the `check --unseen` flag never worked — parseArgs yields the
// boolean `true` (or, before its fix, the next flag string) for a bare
// `--unseen`, so the original `options.unseen === 'true'` comparison only
// matched the literal invocation `--unseen true`. Any truthy value now
// enables the unread-only filter.
async function main() {
  const { command, options, positional } = parseArgs();

  try {
    let result;

    switch (command) {
      case 'check':
        result = await checkEmails(
          options.mailbox || DEFAULT_MAILBOX,
          parseInt(options.limit) || 10,
          options.recent || null,
          Boolean(options.unseen) // if --unseen is set, only get unread messages
        );
        break;

      case 'fetch':
        if (!positional[0]) {
          throw new Error('UID required: node imap.js fetch <uid>');
        }
        result = await fetchEmail(positional[0], options.mailbox);
        break;

      case 'download':
        if (!positional[0]) {
          throw new Error('UID required: node imap.js download <uid>');
        }
        result = await downloadAttachments(positional[0], options.mailbox, options.dir || '.', options.file || null);
        break;

      case 'search':
        result = await searchEmails(options);
        break;

      case 'mark-read':
        if (positional.length === 0) {
          throw new Error('UID(s) required: node imap.js mark-read <uid> [uid2...]');
        }
        result = await markAsRead(positional, options.mailbox);
        break;

      case 'mark-unread':
        if (positional.length === 0) {
          throw new Error('UID(s) required: node imap.js mark-unread <uid> [uid2...]');
        }
        result = await markAsUnread(positional, options.mailbox);
        break;

      case 'list-mailboxes':
        result = await listMailboxes();
        break;

      default:
        console.error('Unknown command:', command);
        console.error('Available commands: check, fetch, download, search, mark-read, mark-unread, list-mailboxes');
        process.exit(1);
    }

    console.log(JSON.stringify(result, null, 2));
  } catch (err) {
    console.error('Error:', err.message);
    process.exit(1);
  }
}
|
||||
|
||||
main();
|
||||
234
skills/imap-smtp-email/scripts/smtp.js
Normal file
234
skills/imap-smtp-email/scripts/smtp.js
Normal file
@ -0,0 +1,234 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* SMTP Email CLI
|
||||
* Send email via SMTP protocol. Works with Gmail, Outlook, 163.com, and any standard SMTP server.
|
||||
* Supports attachments, HTML content, and multiple recipients.
|
||||
*/
|
||||
|
||||
const nodemailer = require('nodemailer');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const fs = require('fs');
|
||||
require('dotenv').config({ path: path.resolve(__dirname, '../.env') });
|
||||
|
||||
function validateReadPath(inputPath) {
|
||||
let realPath;
|
||||
try {
|
||||
realPath = fs.realpathSync(inputPath);
|
||||
} catch {
|
||||
realPath = path.resolve(inputPath);
|
||||
}
|
||||
|
||||
const allowedDirsStr = process.env.ALLOWED_READ_DIRS;
|
||||
if (!allowedDirsStr) {
|
||||
throw new Error('ALLOWED_READ_DIRS not set in .env. File read operations are disabled.');
|
||||
}
|
||||
|
||||
const allowedDirs = allowedDirsStr.split(',').map(d =>
|
||||
path.resolve(d.trim().replace(/^~/, os.homedir()))
|
||||
);
|
||||
|
||||
const allowed = allowedDirs.some(dir =>
|
||||
realPath === dir || realPath.startsWith(dir + path.sep)
|
||||
);
|
||||
|
||||
if (!allowed) {
|
||||
throw new Error(`Access denied: '${inputPath}' is outside allowed read directories`);
|
||||
}
|
||||
|
||||
return realPath;
|
||||
}
|
||||
|
||||
// Parse command-line arguments into { command, options, positional }.
// The first argument is the command; `--key value` pairs become
// options[key] = value; a bare `--flag` (last arg, or followed by another
// `--` option) becomes options[flag] = true.
// Fix: the original assigned `options[key] = value || true` BEFORE checking
// whether `value` was another flag, so `--html --body "x"` stored the
// string '--body' under options.html instead of `true`.
function parseArgs() {
  const args = process.argv.slice(2);
  const command = args[0];
  const options = {};
  const positional = [];

  for (let i = 1; i < args.length; i++) {
    const arg = args[i];
    if (arg.startsWith('--')) {
      const key = arg.slice(2);
      const next = args[i + 1];
      if (next === undefined || next.startsWith('--')) {
        options[key] = true; // bare boolean flag
      } else {
        options[key] = next; // flag with a value; consume it
        i++;
      }
    } else {
      positional.push(arg);
    }
  }

  return { command, options, positional };
}
|
||||
|
||||
// Create SMTP transporter
//
// Builds a nodemailer transport from SMTP_* environment variables.
// SMTP_SECURE=true selects implicit TLS (typically port 465); otherwise the
// connection upgrades via STARTTLS on the configured port (default 587).
// Certificate verification can be disabled with
// SMTP_REJECT_UNAUTHORIZED=false for self-signed servers.
// Throws when SMTP_HOST / SMTP_USER / SMTP_PASS are missing.
function createTransporter() {
  const config = {
    host: process.env.SMTP_HOST,
    port: parseInt(process.env.SMTP_PORT) || 587,
    secure: process.env.SMTP_SECURE === 'true', // true for 465, false for other ports
    auth: {
      user: process.env.SMTP_USER,
      pass: process.env.SMTP_PASS,
    },
    tls: {
      // Opt-out only; defaults to verifying the server certificate.
      rejectUnauthorized: process.env.SMTP_REJECT_UNAUTHORIZED !== 'false',
    },
  };

  if (!config.host || !config.auth.user || !config.auth.pass) {
    throw new Error('Missing SMTP configuration. Please set SMTP_HOST, SMTP_USER, and SMTP_PASS in .env');
  }

  return nodemailer.createTransport(config);
}
|
||||
|
||||
// Send email
//
// options: { from?, to, cc?, bcc?, subject?, text?, html?, body?, attachments? }
// `html` may be a string (the HTML body) or the boolean `true` (the bare
// `--html` CLI flag, meaning "send the plain body as HTML").
// Verifies the SMTP connection first; resolves with
// { success, messageId, response, to } or throws on connection failure.
//
// Fix: a bare `--html` flag reaches this function as the boolean `true`;
// the original forwarded it verbatim as `html: true` to nodemailer instead
// of promoting the text/body content to the HTML field.
async function sendEmail(options) {
  const transporter = createTransporter();

  // Verify connection
  try {
    await transporter.verify();
    console.error('SMTP server is ready to send');
  } catch (err) {
    throw new Error(`SMTP connection failed: ${err.message}`);
  }

  let text = options.text || undefined;
  let html = options.html || undefined;

  // `--html` as a bare flag: promote the plain body to HTML.
  if (html === true) {
    html = text || options.body || undefined;
    text = undefined;
  }

  const mailOptions = {
    from: options.from || process.env.SMTP_FROM || process.env.SMTP_USER,
    to: options.to,
    cc: options.cc || undefined,
    bcc: options.bcc || undefined,
    subject: options.subject || '(no subject)',
    text,
    html,
    attachments: options.attachments || [],
  };

  // If neither text nor html provided, use default text
  if (!mailOptions.text && !mailOptions.html) {
    mailOptions.text = options.body || '';
  }

  const info = await transporter.sendMail(mailOptions);

  return {
    success: true,
    messageId: info.messageId,
    response: info.response,
    to: mailOptions.to,
  };
}
|
||||
|
||||
// Read file content for attachments
//
// Validates `filePath` against ALLOWED_READ_DIRS and returns a nodemailer
// attachment descriptor { filename, path }.
// Fix: the original discarded validateReadPath's resolved (symlink-followed)
// return value and attached the raw user-supplied path instead; the
// validated path is now the one actually used.
function readAttachment(filePath) {
  const safePath = validateReadPath(filePath);
  if (!fs.existsSync(safePath)) {
    throw new Error(`Attachment file not found: ${filePath}`);
  }
  return {
    filename: path.basename(safePath),
    path: safePath,
  };
}
|
||||
|
||||
// Expand a comma-separated `--attach` list into validated attachment
// descriptors, then delegate delivery to sendEmail().
async function sendEmailWithContent(options) {
  if (options.attach) {
    options.attachments = options.attach
      .split(',')
      .map((name) => readAttachment(name.trim()));
  }

  return sendEmail(options);
}
|
||||
|
||||
// Test SMTP connection
//
// Verifies the SMTP connection, then sends a small fixed test message to
// the configured account itself (SMTP_USER). Returns { success, message,
// messageId } on success; throws with the underlying reason otherwise.
async function testConnection() {
  const transporter = createTransporter();

  try {
    await transporter.verify();
    const info = await transporter.sendMail({
      from: process.env.SMTP_FROM || process.env.SMTP_USER,
      to: process.env.SMTP_USER, // Send to self
      subject: 'SMTP Connection Test',
      text: 'This is a test email from the IMAP/SMTP email skill.',
      html: '<p>This is a <strong>test email</strong> from the IMAP/SMTP email skill.</p>',
    });

    return {
      success: true,
      message: 'SMTP connection successful',
      messageId: info.messageId,
    };
  } catch (err) {
    throw new Error(`SMTP test failed: ${err.message}`);
  }
}
|
||||
|
||||
// Main CLI handler
|
||||
async function main() {
|
||||
const { command, options, positional } = parseArgs();
|
||||
|
||||
try {
|
||||
let result;
|
||||
|
||||
switch (command) {
|
||||
case 'send':
|
||||
if (!options.to) {
|
||||
throw new Error('Missing required option: --to <email>');
|
||||
}
|
||||
if (!options.subject && !options['subject-file']) {
|
||||
throw new Error('Missing required option: --subject <text> or --subject-file <file>');
|
||||
}
|
||||
|
||||
// Read subject from file if specified
|
||||
if (options['subject-file']) {
|
||||
validateReadPath(options['subject-file']);
|
||||
options.subject = fs.readFileSync(options['subject-file'], 'utf8').trim();
|
||||
}
|
||||
|
||||
// Read body from file if specified
|
||||
if (options['body-file']) {
|
||||
validateReadPath(options['body-file']);
|
||||
const content = fs.readFileSync(options['body-file'], 'utf8');
|
||||
if (options['body-file'].endsWith('.html') || options.html) {
|
||||
options.html = content;
|
||||
} else {
|
||||
options.text = content;
|
||||
}
|
||||
} else if (options['html-file']) {
|
||||
validateReadPath(options['html-file']);
|
||||
options.html = fs.readFileSync(options['html-file'], 'utf8');
|
||||
} else if (options.body) {
|
||||
options.text = options.body;
|
||||
}
|
||||
|
||||
result = await sendEmailWithContent(options);
|
||||
break;
|
||||
|
||||
case 'test':
|
||||
result = await testConnection();
|
||||
break;
|
||||
|
||||
default:
|
||||
console.error('Unknown command:', command);
|
||||
console.error('Available commands: send, test');
|
||||
console.error('\nUsage:');
|
||||
console.error(' send --to <email> --subject <text> [--body <text>] [--html] [--cc <email>] [--bcc <email>] [--attach <file>]');
|
||||
console.error(' send --to <email> --subject <text> --body-file <file> [--html-file <file>] [--attach <file>]');
|
||||
console.error(' test Test SMTP connection');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(JSON.stringify(result, null, 2));
|
||||
} catch (err) {
|
||||
console.error('Error:', err.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
199
skills/imap-smtp-email/setup.sh
Normal file
199
skills/imap-smtp-email/setup.sh
Normal file
@ -0,0 +1,199 @@
|
||||
#!/bin/bash

# IMAP/SMTP Email Skill Setup Helper
#
# Interactively collects mail-server settings and credentials, writes
# them to a .env file, then smoke-tests both the IMAP and SMTP sides.

echo "================================"
echo " IMAP/SMTP Email Skill Setup"
echo "================================"
echo ""
echo "This script will help you create a .env file with your email credentials."
echo ""

# Prompt for email provider
# Choices 1-10 map to well-known host/port presets below; choice 11
# lets the user enter every setting manually.
echo "Select your email provider:"
echo " 1) Gmail"
echo " 2) Outlook"
echo " 3) 163.com"
echo " 4) vip.163.com"
echo " 5) 126.com"
echo " 6) vip.126.com"
echo " 7) 188.com"
echo " 8) vip.188.com"
echo " 9) yeah.net"
echo " 10) QQ Mail"
echo " 11) Custom"
echo ""
read -p "Enter choice (1-11): " PROVIDER_CHOICE
|
||||
|
||||
# Apply the chosen provider's preset server settings (or prompt for
# them when "Custom" is selected).
#
# Convention used below:
#   SMTP_SECURE=true  -> implicit TLS (typically port 465)
#   SMTP_SECURE=false -> STARTTLS upgrade on a plaintext port (587)
#   IMAP_TLS=true     -> IMAP over TLS (port 993)
case $PROVIDER_CHOICE in
  1)
    # Gmail: STARTTLS on 587; only works with an App Password.
    IMAP_HOST="imap.gmail.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.gmail.com"
    SMTP_PORT="587"
    SMTP_SECURE="false"
    IMAP_TLS="true"
    echo ""
    echo "⚠️ Gmail requires an App Password — your regular Google password will NOT work."
    echo " 1. Go to: https://myaccount.google.com/apppasswords"
    echo " 2. Generate an App Password (requires 2-Step Verification enabled)"
    echo " 3. Use the generated 16-character password below"
    echo ""
    ;;
  2)
    # Outlook / Microsoft 365: STARTTLS on 587.
    IMAP_HOST="outlook.office365.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.office365.com"
    SMTP_PORT="587"
    SMTP_SECURE="false"
    IMAP_TLS="true"
    ;;
  3)
    # NetEase 163.com: implicit TLS on 465 (authorization code, not password).
    IMAP_HOST="imap.163.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.163.com"
    SMTP_PORT="465"
    SMTP_SECURE="true"
    IMAP_TLS="true"
    ;;
  4)
    IMAP_HOST="imap.vip.163.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.vip.163.com"
    SMTP_PORT="465"
    SMTP_SECURE="true"
    IMAP_TLS="true"
    ;;
  5)
    IMAP_HOST="imap.126.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.126.com"
    SMTP_PORT="465"
    SMTP_SECURE="true"
    IMAP_TLS="true"
    ;;
  6)
    IMAP_HOST="imap.vip.126.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.vip.126.com"
    SMTP_PORT="465"
    SMTP_SECURE="true"
    IMAP_TLS="true"
    ;;
  7)
    IMAP_HOST="imap.188.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.188.com"
    SMTP_PORT="465"
    SMTP_SECURE="true"
    IMAP_TLS="true"
    ;;
  8)
    IMAP_HOST="imap.vip.188.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.vip.188.com"
    SMTP_PORT="465"
    SMTP_SECURE="true"
    IMAP_TLS="true"
    ;;
  9)
    IMAP_HOST="imap.yeah.net"
    IMAP_PORT="993"
    SMTP_HOST="smtp.yeah.net"
    SMTP_PORT="465"
    SMTP_SECURE="true"
    IMAP_TLS="true"
    ;;
  10)
    # QQ Mail: STARTTLS on 587.
    IMAP_HOST="imap.qq.com"
    IMAP_PORT="993"
    SMTP_HOST="smtp.qq.com"
    SMTP_PORT="587"
    SMTP_SECURE="false"
    IMAP_TLS="true"
    ;;
  11)
    # Custom provider: prompt for every server setting individually.
    read -p "IMAP Host: " IMAP_HOST
    read -p "IMAP Port: " IMAP_PORT
    read -p "SMTP Host: " SMTP_HOST
    read -p "SMTP Port: " SMTP_PORT
    read -p "Use TLS for IMAP? (true/false): " IMAP_TLS
    read -p "Use SSL for SMTP? (true/false): " SMTP_SECURE
    ;;
  *)
    echo "Invalid choice"
    exit 1
    ;;
esac
|
||||
|
||||
echo ""
|
||||
read -p "Email address: " EMAIL
|
||||
read -s -p "Password / App Password / Authorization Code: " PASSWORD
|
||||
echo ""
|
||||
read -p "Accept self-signed certificates? (y/n): " ACCEPT_CERT
|
||||
if [ "$ACCEPT_CERT" = "y" ]; then
|
||||
REJECT_UNAUTHORIZED="false"
|
||||
else
|
||||
REJECT_UNAUTHORIZED="true"
|
||||
fi
|
||||
|
||||
read -p "Allowed directories for reading files (comma-separated, e.g. ~/Downloads,~/Documents): " ALLOWED_READ_DIRS
|
||||
read -p "Allowed directories for saving attachments (comma-separated, e.g. ~/Downloads): " ALLOWED_WRITE_DIRS
|
||||
|
||||
# Create .env file
# Create the file empty and lock its permissions to 600 BEFORE writing
# the password into it, so the credentials are never world-readable,
# not even for the brief window between write and a later chmod.
touch .env
chmod 600 .env
cat > .env << EOF
# IMAP Configuration
IMAP_HOST=$IMAP_HOST
IMAP_PORT=$IMAP_PORT
IMAP_USER=$EMAIL
IMAP_PASS=$PASSWORD
IMAP_TLS=$IMAP_TLS
IMAP_REJECT_UNAUTHORIZED=$REJECT_UNAUTHORIZED
IMAP_MAILBOX=INBOX

# SMTP Configuration
SMTP_HOST=$SMTP_HOST
SMTP_PORT=$SMTP_PORT
SMTP_SECURE=$SMTP_SECURE
SMTP_USER=$EMAIL
SMTP_PASS=$PASSWORD
SMTP_FROM=$EMAIL
SMTP_REJECT_UNAUTHORIZED=$REJECT_UNAUTHORIZED

# File access whitelist (security)
ALLOWED_READ_DIRS=${ALLOWED_READ_DIRS:-$HOME/Downloads,$HOME/Documents}
ALLOWED_WRITE_DIRS=${ALLOWED_WRITE_DIRS:-$HOME/Downloads}
EOF
|
||||
|
||||
echo ""
|
||||
echo "✅ Created .env file"
|
||||
chmod 600 .env
|
||||
echo "✅ Set .env file permissions to 600 (owner read/write only)"
|
||||
echo ""
|
||||
echo "Testing connections..."
|
||||
echo ""
|
||||
|
||||
# Test IMAP connection
|
||||
echo "Testing IMAP..."
|
||||
if node scripts/imap.js list-mailboxes >/dev/null 2>&1; then
|
||||
echo "✅ IMAP connection successful!"
|
||||
else
|
||||
echo "❌ IMAP connection test failed"
|
||||
echo " Please check your credentials and settings"
|
||||
fi
|
||||
|
||||
# Test SMTP connection
|
||||
echo ""
|
||||
echo "Testing SMTP..."
|
||||
echo " (This will send a test email to your own address: $EMAIL)"
|
||||
if node scripts/smtp.js test >/dev/null 2>&1; then
|
||||
echo "✅ SMTP connection successful!"
|
||||
else
|
||||
echo "❌ SMTP connection test failed"
|
||||
echo " Please check your credentials and settings"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "Setup complete! Try:"
|
||||
echo " node scripts/imap.js check"
|
||||
echo " node scripts/smtp.js send --to recipient@example.com --subject Test --body 'Hello World'"
|
||||
23
skills/r2-upload/.gitignore
vendored
Normal file
23
skills/r2-upload/.gitignore
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
*.egg-info/
|
||||
dist/
|
||||
build/
|
||||
|
||||
# Environment
|
||||
.env
|
||||
.env.local
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
91
skills/r2-upload/SKILL.md
Normal file
91
skills/r2-upload/SKILL.md
Normal file
@ -0,0 +1,91 @@
|
||||
---
|
||||
name: r2-upload
|
||||
description: Uploads files to Cloudflare R2, AWS S3, or any S3-compatible storage and returns a public or temporary URL. Use when you need to publish assets, share files, or provide upload helpers to other skills.
|
||||
compatibility: Requires Python 3.8+ and PyYAML. Uses AWS SigV4 signing (no external SDK required).
|
||||
metadata:
|
||||
author: foundra
|
||||
version: "2.1"
|
||||
---
|
||||
|
||||
# R2 Upload
|
||||
|
||||
Upload files to R2/S3-compatible storage and return a URL.
|
||||
|
||||
## Use when
|
||||
|
||||
- Upload images, documents, or other assets to object storage
|
||||
- Generate public URLs for web/CDN use
|
||||
- Generate presigned URLs for temporary access
|
||||
- Provide upload helpers to other skills (like tech-news)
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Python 3.8+ available as `python3`
|
||||
- PyYAML (`python3 -m pip install pyyaml`)
|
||||
- Config at `scripts/.r2-upload.yml` (bundled with the skill; the path can be overridden with the `R2_UPLOAD_CONFIG` environment variable)
|
||||
- Decide the bucket, object key/path, and visibility (public vs presigned)
|
||||
- If you skip `--key`/`--key-prefix`, the default is `YYYY/MM/DD/<filename>`
|
||||
|
||||
## Recommended workflow
|
||||
|
||||
1. Confirm bucket/key and whether the URL should be public or presigned (avoid overwrites unless asked).
|
||||
2. Verify config and bucket exist.
|
||||
3. Upload with the CLI (recommended) or Python helper.
|
||||
4. Return the URL and key; note whether it is public or temporary and the expiration.
|
||||
|
||||
## Quick commands
|
||||
|
||||
```bash
|
||||
python3 scripts/r2-upload.py ./photo.jpg
|
||||
python3 scripts/r2-upload.py ./photo.jpg --key images/YYYY/MM/DD/cover.jpg
|
||||
python3 scripts/r2-upload.py ./report.pdf --key reports/YYYY/MM/DD/report.pdf
|
||||
python3 scripts/r2-upload.py ./image.png --key-prefix images/YYYY/MM/DD
|
||||
python3 scripts/r2-upload.py ./file.zip --presigned --expires 600
|
||||
```
|
||||
|
||||
## Key options
|
||||
|
||||
- `--bucket <name>`: override the default bucket in config
|
||||
- `--key <path>`: set the object key/path
|
||||
- `--key-prefix <prefix>`: prepend prefix to the local filename
|
||||
- `--expires <seconds>`: presigned URL expiration (1-604800)
|
||||
- `--timeout <seconds>`: network timeout
|
||||
- `--content-type <mime>`: override content type
|
||||
- `--cache-control <value>`: set Cache-Control header
|
||||
- `--content-disposition <value>`: set Content-Disposition header
|
||||
|
||||
## Behavior notes
|
||||
|
||||
- URL type is determined by the `public` field in bucket config; the CLI `--public` flag can force a public URL and `--presigned` a temporary one.
|
||||
- If no key is provided, the default key is `YYYY/MM/DD/<filename>`.
|
||||
- Presigned URLs always use the storage endpoint (custom CDN domains are for public URLs).
|
||||
- **Local mode**: Set `mode: local` in bucket config to copy files to a local `public` directory instead of uploading to S3. Requires `endpoint` (base URL) and optionally `public_dir` (default: `public`).
|
||||
|
||||
## Programmatic usage
|
||||
|
||||
```python
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
r2_dir = Path("/path/to/r2-upload") # update to your local path
|
||||
sys.path.insert(0, str(r2_dir / "scripts"))
|
||||
|
||||
from upload import upload_file, batch_upload, fetch_and_upload
|
||||
|
||||
url = upload_file(
|
||||
local_path="./image.jpg",
|
||||
key="images/YYYY/MM/DD/image.jpg",
|
||||
make_public=True
|
||||
)
|
||||
```
|
||||
|
||||
## Scripts
|
||||
|
||||
- `scripts/r2-upload.py`: CLI upload tool
|
||||
- `scripts/upload.py`: Python helpers (`upload_file`, `batch_upload`, `fetch_and_upload`)
|
||||
|
||||
## References
|
||||
|
||||
- `references/CONFIGURATION.md` (provider config examples)
|
||||
- `references/IMAGES.md` (image workflow)
|
||||
- `references/TROUBLESHOOTING.md` (common errors)
|
||||
282
skills/r2-upload/references/CONFIGURATION.md
Normal file
282
skills/r2-upload/references/CONFIGURATION.md
Normal file
@ -0,0 +1,282 @@
|
||||
# Configuration Reference
|
||||
|
||||
Complete configuration examples for different S3-compatible storage providers.
|
||||
|
||||
> Examples use placeholder values. Replace with your own keys, bucket names, and endpoints.
|
||||
|
||||
## Table of contents
|
||||
|
||||
- [Conventions](#conventions)
|
||||
- [Cloudflare R2](#cloudflare-r2)
|
||||
- [Getting R2 credentials](#getting-r2-credentials)
|
||||
- [Custom domain (optional)](#custom-domain-optional)
|
||||
- [AWS S3](#aws-s3)
|
||||
- [S3 permissions required](#s3-permissions-required)
|
||||
- [MinIO (Self-hosted)](#minio-self-hosted)
|
||||
- [MinIO setup](#minio-setup)
|
||||
- [Backblaze B2](#backblaze-b2)
|
||||
- [B2 S3-compatible keys](#b2-s3-compatible-keys)
|
||||
- [Wasabi](#wasabi)
|
||||
- [Multiple buckets](#multiple-buckets)
|
||||
- [Local mode](#local-mode)
|
||||
- [Security best practices](#security-best-practices)
|
||||
|
||||
## Conventions
|
||||
|
||||
- `endpoint` should be the provider API base URL **without** bucket path.
|
||||
- If `public_url` is omitted, it defaults to `endpoint/<bucket_name>` (path-style).
|
||||
- For custom domains mapped to a bucket, set `public_url` to the domain root (no bucket segment).
|
||||
- Optional `session_token` is supported for temporary credentials.
|
||||
|
||||
## Cloudflare R2
|
||||
|
||||
```yaml
|
||||
default: my-r2-bucket
|
||||
|
||||
buckets:
|
||||
my-r2-bucket:
|
||||
endpoint: https://<account_id>.r2.cloudflarestorage.com
|
||||
access_key_id: your_access_key_id
|
||||
secret_access_key: your_secret_access_key
|
||||
bucket_name: my-bucket
|
||||
public_url: https://cdn.example.com # Optional: custom domain
|
||||
# session_token: optional for temporary credentials
|
||||
region: auto
|
||||
```
|
||||
|
||||
### Getting R2 credentials
|
||||
|
||||
1. Go to Cloudflare Dashboard → R2
|
||||
2. Create a bucket (or use existing one)
|
||||
3. Go to R2 API Tokens: `https://dash.cloudflare.com/<account_id>/r2/api-tokens`
|
||||
4. Create API Token:
|
||||
- **Permissions**: Object Read & Write
|
||||
- **Bucket**: Select your specific bucket (recommended for security)
|
||||
5. Copy Access Key ID and Secret Access Key
|
||||
6. Your Account ID is in the URL: `dash.cloudflare.com/<account_id>/`
|
||||
|
||||
### Custom domain (optional)
|
||||
|
||||
1. In R2 bucket settings, go to "Custom Domains"
|
||||
2. Add your domain (e.g., `cdn.example.com`)
|
||||
3. Add CNAME record pointing to your R2 bucket
|
||||
4. Wait for SSL certificate provisioning
|
||||
5. Use the custom domain as `public_url`
|
||||
|
||||
## AWS S3
|
||||
|
||||
```yaml
|
||||
default: aws-production
|
||||
|
||||
buckets:
|
||||
aws-production:
|
||||
endpoint: https://s3.us-east-1.amazonaws.com
|
||||
access_key_id: AKIAIOSFODNN7EXAMPLE
|
||||
secret_access_key: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
|
||||
# session_token: optional for temporary credentials
|
||||
bucket_name: my-company-assets
|
||||
public_url: https://my-company-assets.s3.amazonaws.com
|
||||
region: us-east-1
|
||||
|
||||
aws-backup:
|
||||
endpoint: https://s3.eu-west-1.amazonaws.com
|
||||
access_key_id: AKIAIOSFODNN7EXAMPLE
|
||||
secret_access_key: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
|
||||
bucket_name: my-company-backups
|
||||
region: eu-west-1
|
||||
```
|
||||
|
||||
### S3 permissions required
|
||||
|
||||
Your IAM user needs these permissions:
|
||||
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"s3:PutObject",
|
||||
"s3:GetObject",
|
||||
"s3:DeleteObject"
|
||||
],
|
||||
"Resource": "arn:aws:s3:::my-bucket/*"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## MinIO (Self-hosted)
|
||||
|
||||
```yaml
|
||||
default: minio-local
|
||||
|
||||
buckets:
|
||||
minio-local:
|
||||
endpoint: http://localhost:9000
|
||||
access_key_id: minioadmin
|
||||
secret_access_key: minioadmin
|
||||
bucket_name: my-bucket
|
||||
public_url: http://localhost:9000/my-bucket
|
||||
region: us-east-1
|
||||
|
||||
minio-production:
|
||||
endpoint: https://minio.company.internal:9000
|
||||
access_key_id: service-account
|
||||
secret_access_key: super-secret-key
|
||||
bucket_name: production-assets
|
||||
public_url: https://assets.company.internal
|
||||
region: us-east-1
|
||||
```
|
||||
|
||||
### MinIO setup
|
||||
|
||||
```bash
|
||||
# Start MinIO locally
|
||||
docker run -p 9000:9000 -p 9001:9001 \
|
||||
-e "MINIO_ROOT_USER=minioadmin" \
|
||||
-e "MINIO_ROOT_PASSWORD=minioadmin" \
|
||||
minio/minio server /data --console-address ":9001"
|
||||
|
||||
# Create bucket
|
||||
mc alias set local http://localhost:9000 minioadmin minioadmin
|
||||
mc mb local/my-bucket
|
||||
```
|
||||
|
||||
## Backblaze B2
|
||||
|
||||
```yaml
|
||||
default: b2-assets
|
||||
|
||||
buckets:
|
||||
b2-assets:
|
||||
endpoint: https://s3.us-west-002.backblazeb2.com
|
||||
access_key_id: your_key_id
|
||||
secret_access_key: your_application_key
|
||||
bucket_name: my-b2-bucket
|
||||
public_url: https://f002.backblazeb2.com/file/my-b2-bucket
|
||||
region: us-west-002
|
||||
```
|
||||
|
||||
### B2 S3-compatible keys
|
||||
|
||||
1. In B2 console, go to "Application Keys"
|
||||
2. Create new key with:
|
||||
- **Type**: Application Key
|
||||
- **Allow access to**: Single bucket or all buckets
|
||||
- **Permissions**: Read and Write
|
||||
3. The "keyID" is your `access_key_id`
|
||||
4. The "applicationKey" is your `secret_access_key`
|
||||
|
||||
## Wasabi
|
||||
|
||||
```yaml
|
||||
default: wasabi-primary
|
||||
|
||||
buckets:
|
||||
wasabi-primary:
|
||||
endpoint: https://s3.us-east-1.wasabisys.com
|
||||
access_key_id: your-access-key
|
||||
secret_access_key: your-secret-key
|
||||
bucket_name: my-wasabi-bucket
|
||||
public_url: https://s3.us-east-1.wasabisys.com/my-wasabi-bucket
|
||||
region: us-east-1
|
||||
```
|
||||
|
||||
## Multiple buckets
|
||||
|
||||
You can define multiple buckets and switch between them:
|
||||
|
||||
```yaml
|
||||
default: production
|
||||
|
||||
buckets:
|
||||
production:
|
||||
endpoint: https://xxx.r2.cloudflarestorage.com
|
||||
access_key_id: prod-key
|
||||
secret_access_key: prod-secret
|
||||
bucket_name: prod-assets
|
||||
public_url: https://cdn.example.com
|
||||
region: auto
|
||||
|
||||
staging:
|
||||
endpoint: https://yyy.r2.cloudflarestorage.com
|
||||
access_key_id: staging-key
|
||||
secret_access_key: staging-secret
|
||||
bucket_name: staging-assets
|
||||
public_url: https://staging-cdn.example.com
|
||||
region: auto
|
||||
|
||||
backups:
|
||||
endpoint: https://s3.us-east-1.amazonaws.com
|
||||
access_key_id: AKIA...
|
||||
secret_access_key: ...
|
||||
bucket_name: company-backups
|
||||
region: us-east-1
|
||||
```
|
||||
|
||||
Switch buckets with `--bucket`:
|
||||
|
||||
```bash
|
||||
# Use default (production)
|
||||
python3 scripts/r2-upload.py file.jpg --public
|
||||
|
||||
# Use staging bucket
|
||||
python3 scripts/r2-upload.py file.jpg --bucket staging --public
|
||||
|
||||
# Use backups bucket
|
||||
python3 scripts/r2-upload.py backup.zip --bucket backups --key "backups/YYYY/MM/DD/backup.zip"
|
||||
```
|
||||
|
||||
## Local mode
|
||||
|
||||
Local mode copies files to a local `public` directory instead of uploading to remote storage. Useful for development and testing.
|
||||
|
||||
```yaml
|
||||
default: local-dev
|
||||
|
||||
buckets:
|
||||
local-dev:
|
||||
mode: local
|
||||
endpoint: http://localhost:8001/public # Base URL for accessing files
|
||||
public_dir: public # Target directory (relative to cwd or absolute)
|
||||
```
|
||||
|
||||
### Configuration fields
|
||||
|
||||
| Field | Required | Description |
|
||||
|---|---|---|
|
||||
| `mode` | Yes | Must be `local` |
|
||||
| `endpoint` | Yes | Base URL used to build the returned file URL |
|
||||
| `public_dir` | No | Directory to copy files into (default: `public`) |
|
||||
|
||||
### How it works
|
||||
|
||||
1. File is copied to `<public_dir>/<key>` (directories are created automatically)
|
||||
2. Returns `<endpoint>/<key>` as the URL
|
||||
|
||||
### Example
|
||||
|
||||
```bash
|
||||
# Upload to local public directory
|
||||
python3 scripts/r2-upload.py ./photo.jpg --bucket local-dev
|
||||
|
||||
# With custom key
|
||||
python3 scripts/r2-upload.py ./photo.jpg --bucket local-dev --key images/photo.jpg
|
||||
# File copied to: public/images/photo.jpg
|
||||
# Returns: http://localhost:8001/public/images/photo.jpg
|
||||
```
|
||||
|
||||
## Security best practices
|
||||
|
||||
1. **File permissions**: Set config file to 600
|
||||
```bash
|
||||
chmod 600 scripts/.r2-upload.yml
|
||||
```
|
||||
|
||||
2. **Separate credentials**: Use different keys for different environments
|
||||
|
||||
3. **Least privilege**: Grant only `PutObject`, `GetObject`, `DeleteObject` permissions
|
||||
|
||||
4. **Rotate keys**: Regularly rotate access keys, especially for production
|
||||
405
skills/r2-upload/references/TROUBLESHOOTING.md
Normal file
405
skills/r2-upload/references/TROUBLESHOOTING.md
Normal file
@ -0,0 +1,405 @@
|
||||
# Troubleshooting
|
||||
|
||||
Common issues and their solutions.
|
||||
|
||||
## Table of contents
|
||||
|
||||
- [HTTP 403 Forbidden](#http-403-forbidden)
|
||||
- [HTTP 400 Bad Request](#http-400-bad-request)
|
||||
- [HTTP 404 Not Found](#http-404-not-found)
|
||||
- [Connection timeout](#connection-timeout)
|
||||
- [Config file not found](#config-file-not-found)
|
||||
- [PyYAML missing](#pyyaml-missing)
|
||||
- [Invalid YAML syntax](#invalid-yaml-syntax)
|
||||
- [Upload succeeds but file not accessible](#upload-succeeds-but-file-not-accessible)
|
||||
- [Presigned URL expired](#presigned-url-expired)
|
||||
- [Large file uploads fail](#large-file-uploads-fail)
|
||||
- [SSL/TLS errors](#ssltls-errors)
|
||||
- [Debugging tips](#debugging-tips)
|
||||
- [Getting help](#getting-help)
|
||||
|
||||
## HTTP 403 Forbidden
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
Error: HTTP 403: <?xml version="1.0" ?>
|
||||
<Error><Code>AccessDenied</Code>...</Error>
|
||||
```
|
||||
|
||||
### Causes and solutions
|
||||
|
||||
**1. Invalid credentials**
|
||||
- Check `access_key_id` and `secret_access_key` in config
|
||||
- For R2: regenerate API token in Cloudflare dashboard
|
||||
- For AWS: verify IAM user has correct permissions
|
||||
|
||||
**2. Bucket doesn't exist**
|
||||
- Verify `bucket_name` matches actual bucket name (case-sensitive)
|
||||
- Create bucket if it doesn't exist
|
||||
|
||||
**3. Insufficient permissions**
|
||||
R2: Ensure API token has "Object Read & Write" permission for the specific bucket
|
||||
|
||||
AWS: IAM policy needs these permissions:
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": ["s3:PutObject", "s3:GetObject"],
|
||||
"Resource": "arn:aws:s3:::YOUR_BUCKET/*"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**4. Wrong endpoint**
|
||||
- R2: Should be `https://<account_id>.r2.cloudflarestorage.com`
|
||||
- AWS: Should be `https://s3.<region>.amazonaws.com`
|
||||
|
||||
## HTTP 400 Bad Request
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
Error: HTTP 400: <Error><Code>InvalidRequest</Code>...</Error>
|
||||
```
|
||||
|
||||
### Causes and solutions
|
||||
|
||||
**1. Wrong endpoint format**
|
||||
```yaml
|
||||
# Wrong
|
||||
endpoint: https://xxx.r2.cloudflarestorage.com/my-bucket
|
||||
|
||||
# Correct
|
||||
endpoint: https://xxx.r2.cloudflarestorage.com
|
||||
bucket_name: my-bucket
|
||||
```
|
||||
|
||||
**2. Wrong region**
|
||||
- R2: Use `region: auto`
|
||||
- AWS: Use actual region like `us-east-1`, `eu-west-1`
|
||||
|
||||
**3. Missing required headers**
|
||||
- Usually internal error, check script version
|
||||
|
||||
## HTTP 404 Not Found
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
Error: HTTP 404: <Error><Code>NoSuchBucket</Code>...</Error>
|
||||
```
|
||||
|
||||
### Solution
|
||||
Bucket doesn't exist. Create it first:
|
||||
|
||||
**R2:**
|
||||
```bash
|
||||
# Via Cloudflare dashboard
|
||||
# Or using wrangler:
|
||||
npx wrangler r2 bucket create my-bucket
|
||||
```
|
||||
|
||||
**AWS:**
|
||||
```bash
|
||||
aws s3 mb s3://my-bucket --region us-east-1
|
||||
```
|
||||
|
||||
**MinIO:**
|
||||
```bash
|
||||
mc mb local/my-bucket
|
||||
```
|
||||
|
||||
## Connection timeout
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
urllib.error.URLError: <urlopen error [Errno 110] Connection timed out>
|
||||
```
|
||||
|
||||
### Causes and solutions
|
||||
|
||||
**1. Network issue**
|
||||
```bash
|
||||
# Test connectivity
|
||||
curl -I https://xxx.r2.cloudflarestorage.com
|
||||
|
||||
# Test with longer timeout
|
||||
python3 scripts/r2-upload.py file.jpg --timeout 60
|
||||
```
|
||||
|
||||
**2. Firewall blocking**
|
||||
- Ensure outbound HTTPS (port 443) is allowed
|
||||
- For corporate networks, check proxy settings
|
||||
|
||||
**3. DNS resolution failure**
|
||||
```bash
|
||||
# Check DNS
|
||||
nslookup xxx.r2.cloudflarestorage.com
|
||||
|
||||
# Use IP if needed (not recommended for production)
|
||||
```
|
||||
|
||||
**4. Local MinIO not running**
|
||||
```bash
|
||||
# Check if MinIO is up
|
||||
docker ps | grep minio
|
||||
|
||||
# Start if needed
|
||||
docker start minio
|
||||
```
|
||||
|
||||
## Config file not found
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
FileNotFoundError: Config file not found: ~/.r2-upload.yml
|
||||
```
|
||||
|
||||
### Solutions
|
||||
|
||||
**1. Create config file**
|
||||
```bash
|
||||
cat > ~/.r2-upload.yml << 'EOF'
|
||||
default: my-bucket
|
||||
|
||||
buckets:
|
||||
my-bucket:
|
||||
endpoint: https://xxx.r2.cloudflarestorage.com
|
||||
access_key_id: your_key
|
||||
secret_access_key: your_secret
|
||||
bucket_name: my-bucket
|
||||
region: auto
|
||||
EOF
|
||||
|
||||
chmod 600 ~/.r2-upload.yml
|
||||
```
|
||||
|
||||
**2. Use custom path**
|
||||
```bash
|
||||
export R2_UPLOAD_CONFIG=/path/to/custom-config.yml
|
||||
python3 scripts/r2-upload.py file.jpg
|
||||
```
|
||||
|
||||
**3. Check file permissions**
|
||||
```bash
|
||||
# Should be readable by current user
|
||||
ls -la ~/.r2-upload.yml
|
||||
|
||||
# Fix if needed
|
||||
chmod 600 ~/.r2-upload.yml
|
||||
```
|
||||
|
||||
## PyYAML missing
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
Error: PyYAML is required. Install with: python3 -m pip install pyyaml
|
||||
```
|
||||
|
||||
### Solution
|
||||
```bash
|
||||
python3 -m pip install pyyaml
|
||||
```
|
||||
|
||||
## Invalid YAML syntax
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
yaml.scanner.ScannerError: mapping values are not allowed here
|
||||
```
|
||||
|
||||
### Solutions
|
||||
|
||||
**1. Validate YAML**
|
||||
```bash
|
||||
# Using Python
|
||||
python3 -c "import os,yaml; yaml.safe_load(open(os.path.expanduser('~/.r2-upload.yml')))"
|
||||
|
||||
# Using yamllint
|
||||
yamllint ~/.r2-upload.yml
|
||||
```
|
||||
|
||||
**2. Common mistakes**
|
||||
```yaml
|
||||
# Wrong - tabs not allowed
|
||||
buckets:
|
||||
my-bucket:
|
||||
|
||||
# Correct - use spaces
|
||||
buckets:
|
||||
my-bucket:
|
||||
|
||||
# Wrong - missing space after colon
|
||||
endpoint:https://...
|
||||
|
||||
# Correct
|
||||
endpoint: https://...
|
||||
```
|
||||
|
||||
## Upload succeeds but file not accessible
|
||||
|
||||
### Symptoms
|
||||
Upload returns URL, but accessing it gives 403/404
|
||||
|
||||
### Causes
|
||||
|
||||
**1. Public URL misconfigured**
|
||||
```yaml
|
||||
# Wrong - missing custom domain
|
||||
public_url: https://xxx.r2.cloudflarestorage.com
|
||||
|
||||
# Correct - if using custom domain
|
||||
public_url: https://cdn.example.com
|
||||
|
||||
# Or if accessing directly
|
||||
public_url: https://xxx.r2.cloudflarestorage.com
|
||||
```
|
||||
|
||||
**2. Public vs presigned URL mismatch**
|
||||
The CLI returns a **presigned URL by default**. If you need a CDN/public URL, pass `--public`.
|
||||
|
||||
If your bucket is private, presigned URLs are the correct approach. Public URLs will 403 unless the bucket is public or behind a CDN that serves it.
|
||||
|
||||
**3. Wrong bucket in URL**
|
||||
Check that `public_url` includes the bucket name if needed:
|
||||
```yaml
|
||||
# For R2 with custom domain
|
||||
public_url: https://cdn.example.com
|
||||
# Final URL: https://cdn.example.com/images/file.jpg
|
||||
|
||||
# For R2 without custom domain
|
||||
public_url: https://xxx.r2.cloudflarestorage.com/my-bucket
|
||||
# Final URL: https://xxx.r2.cloudflarestorage.com/my-bucket/images/file.jpg
|
||||
```
|
||||
|
||||
## Large file uploads fail
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
MemoryError
|
||||
# or
|
||||
Error: Request Entity Too Large
|
||||
```
|
||||
|
||||
### Solutions
|
||||
|
||||
**1. Script reads files into memory**
|
||||
This tool loads the entire file into memory. For very large files, use a provider SDK or CLI with multipart upload.
|
||||
|
||||
**2. Stream upload for large files (advanced)**
|
||||
Modify script to use streaming:
|
||||
```python
|
||||
# Instead of reading entire file
|
||||
data = f.read()
|
||||
|
||||
# Use streaming upload
|
||||
req = urllib.request.Request(..., data=f, method='PUT')
|
||||
```
|
||||
|
||||
**3. Multipart upload for very large files**

For files > 100MB, use S3 multipart upload API

**4. Increase memory (if applicable)**
|
||||
```bash
|
||||
ulimit -m 1048576 # 1GB
|
||||
```
|
||||
|
||||
## SSL/TLS errors
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED]
|
||||
```
|
||||
|
||||
### Solutions
|
||||
|
||||
**1. Update CA certificates**
|
||||
```bash
|
||||
# macOS
|
||||
brew install ca-certificates
|
||||
|
||||
# Ubuntu/Debian
|
||||
sudo apt-get update && sudo apt-get install ca-certificates
|
||||
|
||||
# RHEL/CentOS
|
||||
sudo yum install ca-certificates
|
||||
```
|
||||
|
||||
**2. Self-signed certificates (MinIO)**
|
||||
```python
|
||||
import ssl
|
||||
ctx = ssl.create_default_context()
|
||||
ctx.check_hostname = False
|
||||
ctx.verify_mode = ssl.CERT_NONE
|
||||
|
||||
# Pass context to urlopen
|
||||
urllib.request.urlopen(req, context=ctx)
|
||||
```
|
||||
|
||||
## Presigned URL expired
|
||||
|
||||
### Symptoms
|
||||
```
|
||||
AccessDenied: Request has expired
|
||||
```
|
||||
|
||||
### Solutions
|
||||
|
||||
- Increase expiration time (max 7 days):
|
||||
```bash
|
||||
python3 scripts/r2-upload.py file.jpg --expires 3600
|
||||
```
|
||||
- Generate a fresh presigned URL
|
||||
|
||||
## Debugging tips
|
||||
|
||||
### Enable verbose logging
|
||||
|
||||
```python
|
||||
import http.client
|
||||
http.client.HTTPConnection.debuglevel = 1
|
||||
```
|
||||
|
||||
### Prefer provider CLIs for validation
|
||||
|
||||
```bash
|
||||
# AWS CLI (works for AWS S3 and many S3-compatible providers)
|
||||
aws s3 cp test.jpg s3://my-bucket/
|
||||
```
|
||||
|
||||
### Check request signature manually
|
||||
|
||||
```python
|
||||
import hashlib
|
||||
import hmac
|
||||
import datetime
|
||||
|
||||
# Print canonical request and string to sign
|
||||
# (Add print statements in upload.py)
|
||||
print("Canonical Request:", canonical_request)
|
||||
print("String to Sign:", string_to_sign)
|
||||
print("Signature:", signature)
|
||||
```
|
||||
|
||||
## Getting help
|
||||
|
||||
If issues persist:
|
||||
|
||||
1. **Check provider status**
|
||||
- R2: https://www.cloudflarestatus.com/
|
||||
- AWS: https://status.aws.amazon.com/
|
||||
|
||||
2. **Verify with official CLI**
|
||||
```bash
|
||||
# Install AWS CLI
|
||||
pip install awscli
|
||||
|
||||
# Configure and test
|
||||
aws configure
|
||||
aws s3 cp test.jpg s3://my-bucket/
|
||||
```
|
||||
|
||||
3. **Enable request logging in script**
|
||||
Add temporary print statements for canonical request and string-to-sign
|
||||
17
skills/r2-upload/scripts/.r2-upload.yml
Normal file
17
skills/r2-upload/scripts/.r2-upload.yml
Normal file
@ -0,0 +1,17 @@
|
||||
# Default bucket (used when no bucket specified)
|
||||
default: local
|
||||
|
||||
# Bucket configurations
|
||||
buckets:
|
||||
local:
|
||||
mode: local
|
||||
endpoint: http://localhost:8001/public
|
||||
public_dir: public # relative to cwd, or use absolute path
|
||||
|
||||
# s3:
|
||||
# endpoint: https://s3.ap-northeast-1.amazonaws.com
|
||||
# access_key_id: xxx
|
||||
# secret_access_key: xxx
|
||||
# bucket_name: sparticle-agent
|
||||
# region: ap-northeast-1
|
||||
# public: false
|
||||
75
skills/r2-upload/scripts/r2-upload.py
Normal file
75
skills/r2-upload/scripts/r2-upload.py
Normal file
@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
R2 Upload Script - Command line tool for uploading files to R2/S3
|
||||
Usage: python3 r2-upload.py <file> [--key path] [--bucket name] [--public]
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
from upload import R2UploadError, upload_file
|
||||
|
||||
|
||||
def build_parser() -> argparse.ArgumentParser:
    """Build the argument parser for the r2-upload command-line tool."""
    description = (
        "Upload files to R2/S3-compatible storage and return a URL. "
        "If no key is provided, uses YYYY/MM/DD/<filename>."
    )
    epilog = (
        "Examples:\n"
        " python3 scripts/r2-upload.py ./photo.jpg --public\n"
        " python3 scripts/r2-upload.py ./photo.jpg --key images/2026/02/05/photo.jpg --public\n"
        " python3 scripts/r2-upload.py ./report.pdf --key reports/2026/02/05/report.pdf\n"
        " python3 scripts/r2-upload.py ./image.png --key-prefix images/2026/02/05 --public\n"
    )
    parser = argparse.ArgumentParser(
        description=description,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=epilog,
    )

    parser.add_argument("file", help="File to upload")

    # Plain string options share one shape; declare them table-driven.
    for flag, help_text in (
        ("--key", "Custom key/path for the file"),
        ("--key-prefix", "Prefix to prepend to the file name"),
        ("--bucket", "Bucket to use"),
    ):
        parser.add_argument(flag, help=help_text)

    parser.add_argument("--expires", type=int, default=300, help="Presigned URL expiration (seconds)")
    parser.add_argument("--content-type", help="Override content type")
    parser.add_argument("--cache-control", help="Set Cache-Control header")
    parser.add_argument("--content-disposition", help="Set Content-Disposition header")
    parser.add_argument("--timeout", type=int, default=60, help="Request timeout (seconds)")

    return parser
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: validate arguments, run the upload, print the URL."""
    parser = build_parser()
    args = parser.parse_args()

    # argparse has no declarative mutual exclusion across these two here,
    # so enforce it explicitly before doing any work.
    if args.key and args.key_prefix:
        parser.error("--key and --key-prefix are mutually exclusive")
    if not os.path.isfile(args.file):
        parser.error(f"File not found: {args.file}")

    try:
        print(
            upload_file(
                local_path=args.file,
                key=args.key,
                key_prefix=args.key_prefix,
                bucket=args.bucket,
                expires=args.expires,
                timeout=args.timeout,
                content_type=args.content_type,
                cache_control=args.cache_control,
                content_disposition=args.content_disposition,
            )
        )
    except R2UploadError as err:
        print(f"Error: {err}", file=sys.stderr)
        sys.exit(1)
    except Exception as err:  # defensive catch-all: CLI should not traceback
        print(f"Unexpected error: {err}", file=sys.stderr)
        sys.exit(1)
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
528
skills/r2-upload/scripts/upload.py
Normal file
528
skills/r2-upload/scripts/upload.py
Normal file
@ -0,0 +1,528 @@
|
||||
"""
|
||||
R2 Upload Library - Python module for integration into other skills.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import hmac
|
||||
import mimetypes
|
||||
import os
|
||||
import shutil
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import Dict, Iterable, Optional, Tuple
|
||||
from urllib.parse import quote, urlparse
|
||||
|
||||
# HTTP request timeout in seconds for uploads/downloads.
DEFAULT_TIMEOUT = 60
# Maximum presigned-URL lifetime: 7 days (the S3 SigV4 limit).
MAX_PRESIGN_EXPIRES = 604800  # 7 days
# Config fields a remote (S3/R2) bucket entry must provide.
REQUIRED_BUCKET_FIELDS = ("endpoint", "access_key_id", "secret_access_key", "bucket_name")
# Config fields a `mode: local` bucket entry must provide.
REQUIRED_LOCAL_FIELDS = ("endpoint",)
# User-Agent sent when fetching remote images in fetch_and_upload().
DEFAULT_USER_AGENT = "r2-upload/2.1"
# strftime pattern for the default date-based key prefix.
DEFAULT_DATE_FORMAT = "%Y/%m/%d"
|
||||
|
||||
|
||||
class R2UploadError(RuntimeError):
    """Raised when upload or configuration fails.

    Single error type for this module: configuration problems (missing
    file, invalid YAML, unknown bucket, missing fields), invalid
    arguments (e.g. out-of-range ``expires``), and HTTP/network failures
    during upload are all reported as this exception.
    """
|
||||
|
||||
|
||||
def load_config(config_path: Optional[str] = None) -> dict:
    """Load and validate the R2/S3 YAML configuration.

    When *config_path* is None, falls back to the R2_UPLOAD_CONFIG
    environment variable, then to ``.r2-upload.yml`` next to this module.

    Raises:
        R2UploadError: if PyYAML is missing, the file is absent or invalid
            YAML, or the config lacks a non-empty 'buckets' mapping.
    """
    try:
        import yaml
    except ImportError as exc:
        raise R2UploadError("PyYAML is required. Install with: python3 -m pip install pyyaml") from exc

    path = config_path
    if path is None:
        env_path = os.environ.get("R2_UPLOAD_CONFIG")
        module_default = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), ".r2-upload.yml"
        )
        path = env_path or module_default

    if not os.path.exists(path):
        raise R2UploadError(f"Config file not found: {path}")

    try:
        with open(path, "r", encoding="utf-8") as handle:
            config = yaml.safe_load(handle)
    except yaml.YAMLError as exc:
        raise R2UploadError(f"Invalid YAML in config: {path}") from exc

    if not isinstance(config, dict):
        raise R2UploadError("Config must be a YAML mapping with a 'buckets' key")

    buckets = config.get("buckets")
    if not isinstance(buckets, dict) or not buckets:
        raise R2UploadError("Config missing 'buckets' mapping")

    return config
|
||||
|
||||
|
||||
def resolve_bucket_config(config: dict, bucket: Optional[str] = None) -> Tuple[str, dict]:
    """Return ``(bucket_name, bucket_config)`` for *bucket* or the default.

    Validates that the entry carries the fields its mode requires
    (``mode: local`` buckets only need an endpoint).

    Raises:
        R2UploadError: no usable bucket name, unknown bucket, or missing fields.
    """
    chosen = bucket if bucket else config.get("default")
    if not chosen:
        raise R2UploadError("No bucket specified and no 'default' bucket in config")

    buckets = config.get("buckets", {})
    bucket_config = buckets.get(chosen)
    if not bucket_config:
        available = ", ".join(sorted(buckets.keys()))
        raise R2UploadError(f"Bucket '{chosen}' not found. Available: {available}")

    required = (
        REQUIRED_LOCAL_FIELDS
        if bucket_config.get("mode") == "local"
        else REQUIRED_BUCKET_FIELDS
    )
    missing = [name for name in required if not bucket_config.get(name)]
    if missing:
        raise R2UploadError(
            f"Bucket '{chosen}' missing required fields: {', '.join(missing)}"
        )

    return chosen, bucket_config
|
||||
|
||||
|
||||
def _normalize_endpoint(endpoint: str) -> Tuple[str, str]:
|
||||
endpoint = endpoint.strip().rstrip("/")
|
||||
if not endpoint.startswith("http://") and not endpoint.startswith("https://"):
|
||||
endpoint = f"https://{endpoint}"
|
||||
|
||||
parsed = urlparse(endpoint)
|
||||
if not parsed.netloc:
|
||||
raise R2UploadError(f"Invalid endpoint: {endpoint}")
|
||||
if parsed.path not in ("", "/"):
|
||||
raise R2UploadError("Endpoint should not include a path. Use bucket_name/public_url instead.")
|
||||
|
||||
return f"{parsed.scheme}://{parsed.netloc}", parsed.netloc
|
||||
|
||||
|
||||
def _normalize_key(key: str) -> str:
|
||||
key = key.replace("\\", "/").lstrip("/")
|
||||
return key
|
||||
|
||||
|
||||
def _join_url(base: str, key: str) -> str:
|
||||
base = base.rstrip("/")
|
||||
key = _normalize_key(key)
|
||||
return f"{base}/{key}"
|
||||
|
||||
|
||||
def _guess_content_type(name: str, override: Optional[str] = None) -> str:
|
||||
if override:
|
||||
return override
|
||||
|
||||
mime, _ = mimetypes.guess_type(name)
|
||||
if mime:
|
||||
return mime
|
||||
|
||||
ext = os.path.splitext(name)[1].lower()
|
||||
fallback = {
|
||||
".jpg": "image/jpeg",
|
||||
".jpeg": "image/jpeg",
|
||||
".png": "image/png",
|
||||
".gif": "image/gif",
|
||||
".webp": "image/webp",
|
||||
".svg": "image/svg+xml",
|
||||
".pdf": "application/pdf",
|
||||
".md": "text/markdown",
|
||||
".yml": "text/yaml",
|
||||
".yaml": "text/yaml",
|
||||
".json": "application/json",
|
||||
".txt": "text/plain",
|
||||
}
|
||||
return fallback.get(ext, "application/octet-stream")
|
||||
|
||||
|
||||
def _aws_encode_uri(value: str) -> str:
|
||||
return quote(value, safe="/~")
|
||||
|
||||
|
||||
def _aws_encode_query_param(value: str) -> str:
|
||||
return quote(str(value), safe="-_.~")
|
||||
|
||||
|
||||
def _normalize_header_value(value: str) -> str:
|
||||
return " ".join(str(value).strip().split())
|
||||
|
||||
|
||||
def sign(key: bytes, msg: str) -> bytes:
    """Return the HMAC-SHA256 digest of *msg* (UTF-8) keyed by *key*."""
    return hmac.new(key, msg.encode("utf-8"), digestmod=hashlib.sha256).digest()
|
||||
|
||||
|
||||
def get_signature_key(secret: str, date_stamp: str, region: str, service: str) -> bytes:
    """Derive the SigV4 signing key by chaining HMAC-SHA256 over the scope.

    Per AWS SigV4, the key is HMAC("AWS4"+secret) folded successively over
    date, region, service, and the literal "aws4_request".
    """
    derived = ("AWS4" + secret).encode("utf-8")
    for component in (date_stamp, region, service, "aws4_request"):
        derived = hmac.new(derived, component.encode("utf-8"), hashlib.sha256).digest()
    return derived
|
||||
|
||||
|
||||
def _validate_expires(expires: int) -> int:
    """Coerce *expires* to int and bound it to the S3 presign limit (7 days)."""
    try:
        value = int(expires)
    except (TypeError, ValueError) as exc:
        raise R2UploadError("expires must be an integer (seconds)") from exc

    if not 1 <= value <= MAX_PRESIGN_EXPIRES:
        raise R2UploadError(
            f"expires must be between 1 and {MAX_PRESIGN_EXPIRES} seconds"
        )
    return value
|
||||
|
||||
|
||||
def default_key_prefix(now: Optional[datetime.datetime] = None) -> str:
    """Return the default key prefix: the local date formatted YYYY/MM/DD."""
    moment = datetime.datetime.now() if now is None else now
    return moment.strftime(DEFAULT_DATE_FORMAT)
|
||||
|
||||
|
||||
def build_public_url(key: str, bucket_config: dict) -> str:
    """Return a public (unauthenticated) URL for the object *key*.

    Uses the bucket's explicit ``public_url`` base when configured,
    otherwise falls back to path-style ``<endpoint>/<bucket_name>``.
    """
    # Always normalize/validate the endpoint, even when public_url is set,
    # matching the module's config-validation behavior.
    endpoint, _host = _normalize_endpoint(bucket_config["endpoint"])
    base = bucket_config.get("public_url") or f"{endpoint}/{bucket_config['bucket_name']}"
    return _join_url(base, key)
|
||||
|
||||
|
||||
def generate_presigned_url(key: str, bucket_config: dict, expires: int = 300) -> str:
    """Generate a presigned GET URL for temporary access.

    Builds an AWS SigV4 query-string-authenticated URL using path-style
    addressing (``<endpoint>/<bucket>/<key>``). *expires* is validated
    against the 7-day S3 maximum.

    NOTE(review): assumes a remote bucket config with access_key_id /
    secret_access_key / bucket_name present — a ``mode: local`` bucket
    would raise KeyError here; confirm callers route local buckets away.
    """
    key = _normalize_key(key)
    expires = _validate_expires(expires)

    endpoint, host = _normalize_endpoint(bucket_config["endpoint"])
    access_key = bucket_config["access_key_id"]
    secret_key = bucket_config["secret_access_key"]
    bucket_name = bucket_config["bucket_name"]
    region = bucket_config.get("region", "auto")  # "auto" is R2's region token
    session_token = bucket_config.get("session_token")

    # SigV4 needs the timestamp twice: date-only for the credential scope,
    # full timestamp for X-Amz-Date.
    now = datetime.datetime.now(datetime.timezone.utc)
    date_stamp = now.strftime("%Y%m%d")
    amz_date = now.strftime("%Y%m%dT%H%M%SZ")

    canonical_uri = f"/{bucket_name}/{_aws_encode_uri(key)}"
    credential_scope = f"{date_stamp}/{region}/s3/aws4_request"

    params = {
        "X-Amz-Algorithm": "AWS4-HMAC-SHA256",
        "X-Amz-Credential": f"{access_key}/{credential_scope}",
        "X-Amz-Date": amz_date,
        "X-Amz-Expires": str(expires),
        "X-Amz-SignedHeaders": "host",
    }
    if session_token:
        params["X-Amz-Security-Token"] = session_token

    # Canonical query string: parameters sorted by name, percent-encoded.
    canonical_query = "&".join(
        f"{_aws_encode_query_param(k)}={_aws_encode_query_param(v)}"
        for k, v in sorted(params.items())
    )

    # Only the Host header is signed; the payload hash is the literal
    # UNSIGNED-PAYLOAD marker for presigned GETs.
    canonical_request = (
        f"GET\n{canonical_uri}\n{canonical_query}\n"
        f"host:{host}\n\n"
        "host\nUNSIGNED-PAYLOAD"
    )

    string_to_sign = (
        "AWS4-HMAC-SHA256\n"
        f"{amz_date}\n"
        f"{credential_scope}\n"
        f"{hashlib.sha256(canonical_request.encode('utf-8')).hexdigest()}"
    )

    signing_key = get_signature_key(secret_key, date_stamp, region, "s3")
    signature = hmac.new(
        signing_key, string_to_sign.encode("utf-8"), hashlib.sha256
    ).hexdigest()

    # The signature is appended last; the rest of the query must be exactly
    # the canonical query that was signed.
    presigned_query = f"{canonical_query}&X-Amz-Signature={signature}"
    return f"{endpoint}/{bucket_name}/{_aws_encode_uri(key)}?{presigned_query}"
|
||||
|
||||
|
||||
def _upload_local(data: bytes, key: str, bucket_config: dict) -> str:
|
||||
"""Upload by copying file to local public directory."""
|
||||
public_dir = bucket_config.get("public_dir", "public")
|
||||
if not os.path.isabs(public_dir):
|
||||
# Resolve relative to current working directory
|
||||
public_dir = os.path.join(os.getcwd(), public_dir)
|
||||
|
||||
dest_path = os.path.join(public_dir, key)
|
||||
os.makedirs(os.path.dirname(dest_path), exist_ok=True)
|
||||
|
||||
with open(dest_path, "wb") as f:
|
||||
f.write(data)
|
||||
|
||||
endpoint = bucket_config["endpoint"].rstrip("/")
|
||||
return f"{endpoint}/{key}"
|
||||
|
||||
|
||||
def upload_bytes(
    data: bytes,
    key: Optional[str] = None,
    bucket: Optional[str] = None,
    make_public: Optional[bool] = None,
    config: Optional[dict] = None,
    *,
    config_path: Optional[str] = None,
    expires: int = 300,
    timeout: int = DEFAULT_TIMEOUT,
    content_type: Optional[str] = None,
    cache_control: Optional[str] = None,
    content_disposition: Optional[str] = None,
) -> str:
    """Upload in-memory bytes and return a URL (default key: YYYY/MM/DD/upload-<id>.bin).

    For remote buckets this performs a SigV4-signed HTTP PUT via urllib;
    for ``mode: local`` buckets it writes to the public directory instead.

    Args:
        data: Raw bytes to store.
        key: Object key; a date-prefixed random name is generated if None.
        bucket: Bucket name from the config; None uses the config default.
        make_public: Return a public URL instead of a presigned one; None
            falls back to the bucket's ``public`` setting.
        config: Pre-loaded config mapping; loaded from *config_path* if None.
        config_path: Explicit config file path (see load_config).
        expires: Presigned URL lifetime in seconds.
        timeout: HTTP request timeout in seconds.
        content_type: Override the type guessed from the key's extension.
        cache_control: Optional Cache-Control header (also signed).
        content_disposition: Optional Content-Disposition header (also signed).

    Returns:
        Public URL when make_public, otherwise a presigned GET URL.

    Raises:
        R2UploadError: configuration problems, HTTP errors, network errors.
    """
    if config is None:
        config = load_config(config_path)

    bucket_name, bucket_config = resolve_bucket_config(config, bucket)
    if make_public is None:
        # Bucket-level default visibility.
        make_public = bucket_config.get("public", False)
    if key is None:
        prefix = default_key_prefix()
        key = f"{prefix}/upload-{uuid.uuid4().hex[:8]}.bin"
    key = _normalize_key(key)

    # Local buckets bypass HTTP entirely.
    if bucket_config.get("mode") == "local":
        return _upload_local(data, key, bucket_config)

    endpoint, host = _normalize_endpoint(bucket_config["endpoint"])
    access_key = bucket_config["access_key_id"]
    secret_key = bucket_config["secret_access_key"]
    region = bucket_config.get("region", "auto")
    session_token = bucket_config.get("session_token")

    content_type = _guess_content_type(key, content_type)

    # SigV4 timestamps: date-only for the credential scope, full for x-amz-date.
    now = datetime.datetime.now(datetime.timezone.utc)
    date_stamp = now.strftime("%Y%m%d")
    amz_date = now.strftime("%Y%m%dT%H%M%SZ")

    # PUTs sign the exact payload hash (unlike presigned GETs).
    payload_hash = hashlib.sha256(data).hexdigest()

    headers: Dict[str, str] = {
        "host": host,
        "x-amz-content-sha256": payload_hash,
        "x-amz-date": amz_date,
        "content-type": content_type,
    }
    if cache_control:
        headers["cache-control"] = cache_control
    if content_disposition:
        headers["content-disposition"] = content_disposition
    if session_token:
        headers["x-amz-security-token"] = session_token

    # Canonical headers: lowercase names, sorted, whitespace-normalized values.
    canonical_headers = "".join(
        f"{k}:{_normalize_header_value(v)}\n" for k, v in sorted(headers.items())
    )
    signed_headers = ";".join(sorted(headers.keys()))
    canonical_uri = f"/{bucket_name}/{_aws_encode_uri(key)}"

    # Empty canonical query string for a plain PUT (hence the double \n).
    canonical_request = (
        f"PUT\n{canonical_uri}\n\n{canonical_headers}\n{signed_headers}\n{payload_hash}"
    )

    credential_scope = f"{date_stamp}/{region}/s3/aws4_request"
    string_to_sign = (
        "AWS4-HMAC-SHA256\n"
        f"{amz_date}\n"
        f"{credential_scope}\n"
        f"{hashlib.sha256(canonical_request.encode('utf-8')).hexdigest()}"
    )

    signing_key = get_signature_key(secret_key, date_stamp, region, "s3")
    signature = hmac.new(
        signing_key, string_to_sign.encode("utf-8"), hashlib.sha256
    ).hexdigest()

    auth_header = (
        "AWS4-HMAC-SHA256 "
        f"Credential={access_key}/{credential_scope}, "
        f"SignedHeaders={signed_headers}, "
        f"Signature={signature}"
    )

    url = f"{endpoint}/{bucket_name}/{_aws_encode_uri(key)}"
    req = urllib.request.Request(
        url,
        data=data,
        method="PUT",
        headers={**headers, "Authorization": auth_header},
    )

    try:
        with urllib.request.urlopen(req, timeout=timeout) as resp:
            if resp.status not in (200, 201, 204):
                raise R2UploadError(f"Upload failed: HTTP {resp.status}")
    except urllib.error.HTTPError as exc:
        # Include a truncated response body — S3 errors carry useful XML.
        body = ""
        try:
            body = exc.read().decode("utf-8", errors="ignore")
        except Exception:
            body = ""
        message = f"HTTP {exc.code}: {exc.reason}"
        if body:
            message = f"{message} - {body.strip()[:500]}"
        raise R2UploadError(message) from exc
    except urllib.error.URLError as exc:
        raise R2UploadError(f"Network error: {exc.reason}") from exc

    if make_public:
        return build_public_url(key, bucket_config)

    return generate_presigned_url(key, bucket_config, expires=expires)
|
||||
|
||||
|
||||
def upload_file(
    local_path: str,
    key: Optional[str] = None,
    bucket: Optional[str] = None,
    make_public: Optional[bool] = None,
    config: Optional[dict] = None,
    *,
    config_path: Optional[str] = None,
    key_prefix: Optional[str] = None,
    expires: int = 300,
    timeout: int = DEFAULT_TIMEOUT,
    content_type: Optional[str] = None,
    cache_control: Optional[str] = None,
    content_disposition: Optional[str] = None,
) -> str:
    """
    Upload a file to R2/S3 storage.

    If key is omitted, uses YYYY/MM/DD/<filename> (or key_prefix/<filename>).

    Args:
        local_path: Path of the file to upload.
        key: Full object key; mutually exclusive with key_prefix.
        bucket: Bucket name from the config; None uses the config default.
        make_public: Return a public URL instead of a presigned one; None
            falls back to the bucket's ``public`` setting.
        config: Pre-loaded config mapping; loaded from *config_path* if None.
        config_path: Explicit config file path (see load_config).
        key_prefix: Prefix for the generated key (default: today's date).
        expires: Presigned URL lifetime in seconds.
        timeout: HTTP request timeout in seconds.
        content_type: Override the guessed Content-Type.
        cache_control: Optional Cache-Control header value.
        content_disposition: Optional Content-Disposition header value.

    Returns:
        URL string: public URL if make_public, otherwise presigned URL.

    Raises:
        R2UploadError: on configuration or upload failure, or when both
            key and key_prefix are given.
    """
    if key and key_prefix:
        raise R2UploadError("Provide either key or key_prefix, not both")

    if key is None:
        filename = os.path.basename(local_path)
        resolved_prefix = default_key_prefix() if key_prefix is None else key_prefix
        prefix = resolved_prefix.strip("/")
        # Fix: the generated key must contain the actual file name
        # (previously a garbled literal was interpolated instead).
        key = f"{prefix}/{filename}" if prefix else filename

    if config is None:
        config = load_config(config_path)

    _, bucket_config = resolve_bucket_config(config, bucket)

    if bucket_config.get("mode") == "local":
        # Local mode: copy the file straight into the public directory
        # instead of reading it into memory for an HTTP PUT.
        key = _normalize_key(key)
        public_dir = bucket_config.get("public_dir", "public")
        if not os.path.isabs(public_dir):
            public_dir = os.path.join(os.getcwd(), public_dir)
        dest_path = os.path.join(public_dir, key)
        os.makedirs(os.path.dirname(dest_path), exist_ok=True)
        shutil.copy2(local_path, dest_path)
        endpoint = bucket_config["endpoint"].rstrip("/")
        return f"{endpoint}/{key}"

    # Remote mode: whole file is read into memory (see skill docs for the
    # large-file caveat), then PUT via upload_bytes.
    with open(local_path, "rb") as f:
        data = f.read()

    # Guess from the real file path (more reliable than the key).
    content_type = _guess_content_type(local_path, content_type)

    return upload_bytes(
        data,
        key=key,
        bucket=bucket,
        make_public=make_public,
        config=config,
        config_path=config_path,
        expires=expires,
        timeout=timeout,
        content_type=content_type,
        cache_control=cache_control,
        content_disposition=content_disposition,
    )
|
||||
|
||||
|
||||
def batch_upload(
    files: Iterable[str],
    key_prefix: str = "",
    bucket: Optional[str] = None,
    make_public: Optional[bool] = None,
    config: Optional[dict] = None,
    *,
    config_path: Optional[str] = None,
    expires: int = 300,
    timeout: int = DEFAULT_TIMEOUT,
) -> list:
    """Upload multiple files and return a list of URLs (in input order).

    Each file is stored as ``<key_prefix>/<filename>``; with an empty
    prefix the key is left to upload_file's default (date-based prefix).

    Raises:
        R2UploadError: propagated from the first file that fails.
    """
    if config is None:
        config = load_config(config_path)

    urls = []
    prefix = key_prefix.strip("/")

    for file_path in files:
        filename = os.path.basename(file_path)
        # Fix: build the key from the real file name (previously a garbled
        # literal was interpolated instead of {filename}).
        key = f"{prefix}/{filename}" if prefix else None
        url = upload_file(
            file_path,
            key=key,
            bucket=bucket,
            make_public=make_public,
            config=config,
            expires=expires,
            timeout=timeout,
        )
        urls.append(url)
    return urls
|
||||
|
||||
|
||||
def fetch_and_upload(
    image_url: str,
    key: Optional[str] = None,
    bucket: Optional[str] = None,
    make_public: Optional[bool] = None,
    config: Optional[dict] = None,
    *,
    config_path: Optional[str] = None,
    expires: int = 300,
    timeout: int = DEFAULT_TIMEOUT,
    user_agent: str = DEFAULT_USER_AGENT,
    content_type: Optional[str] = None,
) -> str:
    """
    Download a remote image and upload to R2/S3.

    Returns:
        URL string: public URL if make_public, otherwise presigned URL.
    """
    request = urllib.request.Request(image_url, headers={"User-Agent": user_agent})

    try:
        with urllib.request.urlopen(request, timeout=timeout) as resp:
            data = resp.read()
            response_type = resp.headers.get("Content-Type", "")
    except urllib.error.HTTPError as exc:
        raise R2UploadError(f"HTTP {exc.code}: {exc.reason}") from exc
    except urllib.error.URLError as exc:
        raise R2UploadError(f"Network error: {exc.reason}") from exc

    if not content_type:
        # Prefer the server-reported type (minus any charset parameter);
        # fall back to guessing from the URL's extension.
        header_type = response_type.split(";")[0].strip() if response_type else ""
        content_type = header_type or _guess_content_type(image_url)

    return upload_bytes(
        data,
        key=key,
        bucket=bucket,
        make_public=make_public,
        config=config,
        config_path=config_path,
        expires=expires,
        timeout=timeout,
        content_type=content_type,
    )
|
||||
Loading…
Reference in New Issue
Block a user