Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 1 addition & 11 deletions .github/workflows/sync-release-to-main.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -40,15 +40,5 @@ jobs:
--base main \
--head release \
--title "chore: sync release to main" \
--body "Automated sync of release tags back to main."
fi

- name: Enable auto-merge on sync PR
if: steps.check.outputs.ahead != '0'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR=$(gh pr list --repo ${{ github.repository }} --base main --head release --state open --json number --jq '.[0].number')
if [ -n "$PR" ]; then
gh pr merge "$PR" --repo ${{ github.repository }} --merge --auto
--body "Automated sync of release tags back to main. Merge with **merge commit**."
fi
2 changes: 1 addition & 1 deletion scripts/generate-registry.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
* Run: npm run generate:registry
*/

import { readFileSync, existsSync, writeFileSync } from 'fs';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { join } from 'path';
import * as YAML from 'yaml';

Expand Down
5 changes: 3 additions & 2 deletions scripts/update-docs.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { readFileSync, writeFileSync, existsSync } from 'fs';
import { join, dirname } from 'path';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
import * as yaml from 'yaml';

import type { CommandSpec } from '../src/types.js';

const __filename = fileURLToPath(import.meta.url);
Expand Down
156 changes: 156 additions & 0 deletions src/lib/bundle.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
import { getStorageConfig } from '@auth/provider.js';
import { bundle } from '@tigrisdata/storage';
import { exitWithError } from '@utils/exit.js';
import { getFormat, getOption, readStdin } from '@utils/options.js';
import { parseAnyPath } from '@utils/path.js';
import { createWriteStream, existsSync, readFileSync } from 'fs';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises';

const MAX_KEYS = 5000;

/**
 * Parse a newline-delimited keys document into a list of object keys.
 * Whitespace around each line is stripped; blank lines and `#` comment
 * lines are dropped.
 */
function parseKeys(content: string): string[] {
  const keys: string[] = [];
  for (const rawLine of content.split('\n')) {
    const key = rawLine.trim();
    if (key.length === 0 || key.startsWith('#')) {
      continue;
    }
    keys.push(key);
  }
  return keys;
}

/**
 * Infer the archive compression algorithm from the output file extension.
 * Returns `undefined` when the extension is not a recognized tar variant,
 * letting the caller fall back to its own default.
 */
function detectCompression(
  outputPath: string
): 'none' | 'gzip' | 'zstd' | undefined {
  const bySuffix: ReadonlyArray<[string, 'none' | 'gzip' | 'zstd']> = [
    ['.tar.gz', 'gzip'],
    ['.tgz', 'gzip'],
    ['.tar.zst', 'zstd'],
    ['.tar', 'none'],
  ];
  const match = bySuffix.find(([suffix]) => outputPath.endsWith(suffix));
  return match?.[1];
}

/**
 * `tigris bundle` — download multiple objects as one streaming tar archive.
 *
 * Key resolution order: `--keys` flag (comma-separated inline list, a path
 * to a keys file, or a single key), then piped stdin; otherwise the command
 * errors out. The archive is streamed to `--output` when given, else to
 * stdout (binary).
 *
 * This is a CLI entry point: all failure paths go through `exitWithError`
 * and the happy path ends in `process.exit(0)`, so the function never
 * returns control to the caller in practice.
 */
export default async function bundleCommand(options: Record<string, unknown>) {
  const bucketArg = getOption<string>(options, ['bucket']);
  const keysArg = getOption<string>(options, ['keys', 'k']);
  const outputPath = getOption<string>(options, ['output', 'o']);
  const compressionArg = getOption<string>(options, ['compression']);
  const onError = getOption<string>(options, ['on-error', 'onError'], 'skip');
  const format = getFormat(options);
  const jsonMode = format === 'json';

  // stdout carries binary data when no --output
  const stdoutBinary = !outputPath;

  if (!bucketArg) {
    exitWithError('Bucket is required');
  }

  // bucketArg may be a bare bucket name or a t3://bucket/prefix path.
  const { bucket, path: prefix } = parseAnyPath(bucketArg);

  if (!bucket) {
    exitWithError('Invalid bucket');
  }

  // Resolve keys: file, inline, or stdin
  // NOTE(review): code below assumes exitWithError is typed `never`, so
  // `keys` is definitely assigned after this chain — confirm its signature.
  let keys: string[];

  if (keysArg) {
    if (keysArg.includes(',')) {
      // Commas present → always treat as inline comma-separated keys
      keys = keysArg
        .split(',')
        .map((k) => k.trim())
        .filter((k) => k.length > 0);
    } else if (existsSync(keysArg)) {
      // No commas and local file exists → read as keys file
      keys = parseKeys(readFileSync(keysArg, 'utf-8'));
    } else {
      // Single key
      keys = [keysArg.trim()];
    }
  } else if (!process.stdin.isTTY) {
    // stdin is piped → read the whole stream as a keys document.
    const input = await readStdin();
    keys = parseKeys(input);
  } else {
    exitWithError('Keys are required. Provide via --keys or pipe to stdin.');
  }

  // Prepend path prefix from bucket arg (e.g. t3://bucket/prefix)
  if (prefix) {
    const normalizedPrefix = prefix.endsWith('/') ? prefix : `${prefix}/`;
    keys = keys.map((key) => `${normalizedPrefix}${key}`);
  }

  if (keys.length === 0) {
    exitWithError('No keys found');
  }

  if (keys.length > MAX_KEYS) {
    exitWithError(`Too many keys (max ${MAX_KEYS}). Got ${keys.length}`);
  }

  // Resolve compression: explicit flag > auto-detect from extension > default
  // NOTE(review): compressionArg is cast without validation — an unknown
  // value is passed through to the bundle API as-is; confirm it rejects it.
  let compression: 'none' | 'gzip' | 'zstd' = 'none';
  if (compressionArg) {
    compression = compressionArg as 'none' | 'gzip' | 'zstd';
  } else if (outputPath) {
    compression = detectCompression(outputPath) ?? 'none';
  }

  // Progress note goes to stderr, and only when writing to a file in
  // human-readable mode (suppressed for --format json).
  if (!stdoutBinary && !jsonMode) {
    process.stderr.write(`Bundling ${keys.length} object(s)...\n`);
  }

  const config = await getStorageConfig({ withCredentialProvider: true });

  const { data, error } = await bundle(keys, {
    config: { ...config, bucket },
    compression,
    onError: onError as 'skip' | 'fail',
  });

  if (error) {
    exitWithError(error);
  }

  // Bridge the web ReadableStream returned by the API into a Node stream
  // so it can be piped with stream/promises.pipeline.
  const nodeStream = Readable.fromWeb(data.body as ReadableStream);

  if (outputPath) {
    const writeStream = createWriteStream(outputPath);
    await pipeline(nodeStream, writeStream);

    if (jsonMode) {
      console.log(
        JSON.stringify({
          action: 'bundled',
          bucket,
          keys: keys.length,
          compression,
          output: outputPath,
        })
      );
    } else {
      console.log(
        `Bundled ${keys.length} object(s) from '${bucket}' to ${outputPath}`
      );
    }
  } else {
    // stdout is the archive itself here.
    await pipeline(nodeStream, process.stdout);

    if (jsonMode) {
      // JSON summary goes to stderr so the binary tar on stdout stays clean.
      console.error(
        JSON.stringify({
          action: 'bundled',
          bucket,
          keys: keys.length,
          compression,
          output: 'stdout',
        })
      );
    }
  }

  process.exit(0);
}
8 changes: 1 addition & 7 deletions src/lib/iam/policies/utils.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,6 @@
import type { PolicyDocument } from '@tigrisdata/iam';

export async function readStdin(): Promise<string> {
const chunks: Buffer[] = [];
for await (const chunk of process.stdin) {
chunks.push(chunk);
}
return Buffer.concat(chunks).toString('utf-8');
}
export { readStdin } from '@utils/options.js';

export function parseDocument(jsonString: string): PolicyDocument {
const raw = JSON.parse(jsonString);
Expand Down
35 changes: 35 additions & 0 deletions src/specs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -546,6 +546,41 @@ commands:
alias: f
description: Skip confirmation prompts (alias for --yes)

# bundle
- name: bundle
description: Download multiple objects as a streaming tar archive in a single request. Designed for batch workloads that need many objects without per-object HTTP overhead
examples:
- "tigris bundle my-bucket --keys key1.jpg,key2.jpg --output archive.tar"
- "tigris bundle my-bucket --keys keys.txt --output archive.tar"
- "tigris bundle t3://my-bucket --keys keys.txt --compression gzip -o archive.tar.gz"
- "cat keys.txt | tigris bundle my-bucket > archive.tar"
messages:
onStart: ''
onSuccess: ''
onFailure: 'Bundle failed. Verify the bucket exists and credentials have read access'
arguments:
- name: bucket
required: true
type: positional
description: Bucket name or t3:// path containing the objects to bundle
examples:
- my-bucket
- t3://my-bucket
- name: keys
description: "Comma-separated object keys, or path to a file with one key per line. If a local file matching the value exists, it is read as a keys file. If omitted, reads keys from stdin"
alias: k
- name: output
description: Output file path. Defaults to stdout (for piping)
alias: o
- name: compression
description: Compression algorithm for the archive
options: [none, gzip, zstd]
default: none
- name: on-error
description: How to handle missing objects. 'skip' omits them, 'fail' aborts the request
options: [skip, fail]
default: skip

#########################
# Manage organizations
#########################
Expand Down
12 changes: 12 additions & 0 deletions src/utils/options.ts
Original file line number Diff line number Diff line change
Expand Up @@ -73,3 +73,15 @@ export function parseBoolean(
if (typeof value === 'boolean') return value;
return value === 'true';
}

/**
 * Drain stdin completely and decode it as UTF-8.
 *
 * Intended for piped input only — callers should first check that
 * `process.stdin.isTTY` is falsy, otherwise this awaits indefinitely
 * for input that never arrives.
 *
 * @returns the full stdin contents as a single string
 */
export async function readStdin(): Promise<string> {
  const collected: Buffer[] = [];
  for await (const piece of process.stdin) {
    collected.push(piece);
  }
  return Buffer.concat(collected).toString('utf-8');
}
Loading