Skip to content

Commit

Permalink
Merge pull request #165 from Maxxen/dev
Browse files Browse the repository at this point in the history
More move returns, update extension upload script
  • Loading branch information
Maxxen authored Oct 30, 2023
2 parents 0aaa906 + 98c7b57 commit 90c6a7a
Show file tree
Hide file tree
Showing 3 changed files with 40 additions and 12 deletions.
44 changes: 35 additions & 9 deletions scripts/extension-upload.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,25 +1,51 @@
#!/bin/bash

# Extension upload script
#
# Usage: ./extension-upload.sh <name> <extension_version> <duckdb_version> <architecture> <s3_bucket> <copy_to_latest> <copy_to_versioned>
# <name>                : Name of the extension
# <extension_version>   : Version (commit / version tag) of the extension
# <duckdb_version>      : Version (commit / version tag) of DuckDB
# <architecture>        : Architecture target of the extension binary
# <s3_bucket>           : S3 bucket to upload to
# <copy_to_latest>      : Set this as the latest version ("true" / "false", default: "false")
# <copy_to_versioned>   : Upload a versioned copy that is protected from deletion ("true" / "false")

set -e

# Extension binary staged by the build job
ext="/tmp/extension/$1.duckdb_extension"

script_dir="$(dirname "$(readlink -f "$0")")"

# Abort (successfully) when no AWS credentials are available, e.g. on fork PRs
if [ -z "$AWS_ACCESS_KEY_ID" ]; then
	echo "No AWS key found, skipping.."
	exit 0
fi

# (Optionally) sign the binary: hash it, sign the hash with the provided
# private key, and append the signature to the extension file
if [ "$DUCKDB_EXTENSION_SIGNING_PK" != "" ]; then
	echo "$DUCKDB_EXTENSION_SIGNING_PK" > private.pem
	"$script_dir/../duckdb/scripts/compute-extension-hash.sh" "$ext" > "$ext.hash"
	openssl pkeyutl -sign -in "$ext.hash" -inkey private.pem -pkeyopt digest:sha256 -out "$ext.sign"
	cat "$ext.sign" >> "$ext"
fi

# Compress the extension binary before upload
gzip < "$ext" > "$ext.gz"

# Upload the versioned copy (pinned to extension version + DuckDB version)
if [[ $7 = 'true' ]]; then
	aws s3 cp "$ext.gz" "s3://$5/$1/$2/$3/$4/$1.duckdb_extension.gz" --acl public-read
fi

# Upload the "latest" copy (path without extension version; overwritten each release)
if [[ $6 = 'true' ]]; then
	aws s3 cp "$ext.gz" "s3://$5/$3/$4/$1.duckdb_extension.gz" --acl public-read
fi

# Clean up the private key written during signing
if [ "$DUCKDB_EXTENSION_SIGNING_PK" != "" ]; then
	rm private.pem
fi
5 changes: 3 additions & 2 deletions spatial/src/spatial/core/functions/table/st_read_osm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ static unique_ptr<GlobalTableFunctionState> InitGlobal(ClientContext &context, T
throw ParserException("First blob in file is not a header");
}

return global_state;
return std::move(global_state);
}

struct LocalState : LocalTableFunctionState {
Expand Down Expand Up @@ -797,7 +797,8 @@ static unique_ptr<LocalTableFunctionState> InitLocal(ExecutionContext &context,
}
auto block = DecompressBlob(context.client, *blob);

return make_uniq<LocalState>(std::move(block));
auto result = make_uniq<LocalState>(std::move(block));
return std::move(result);
}

static void Execute(ClientContext &context, TableFunctionInput &input, DataChunk &output) {
Expand Down
3 changes: 2 additions & 1 deletion spatial/src/spatial/gdal/functions/st_drivers.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ unique_ptr<FunctionData> GdalDriversTableFunction::Bind(ClientContext &context,

unique_ptr<GlobalTableFunctionState> GdalDriversTableFunction::Init(ClientContext &context,
                                                                    TableFunctionInitInput &input) {
	// Bind result into a named local and move it out: the return converts
	// unique_ptr<State> -> unique_ptr<GlobalTableFunctionState>, and the
	// explicit std::move is required for such converting returns on
	// compilers predating the C++20/P1825 implicit-move rules.
	auto result = make_uniq<State>();
	return std::move(result);
}

void GdalDriversTableFunction::Execute(ClientContext &context, TableFunctionInput &input, DataChunk &output) {
Expand Down

0 comments on commit 90c6a7a

Please sign in to comment.