Skip to content

Commit

Permalink
fix: ✅ integration test setup
Browse files Browse the repository at this point in the history
  • Loading branch information
ignazio-bovo committed Jun 19, 2024
1 parent ae510bc commit 81ea8fb
Show file tree
Hide file tree
Showing 8 changed files with 47 additions and 22 deletions.
2 changes: 1 addition & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ SQUID_GQL_PORT=4352
ENABLE_STORAGE_PROVIDER=true

# Configuration for localstack testing only
LOCALSTACK_ENABLED=false
LOCALSTACK_ENABLED=true
LOCALSTACK_PORT=4566

## Specify the cloud storage provider to use:
Expand Down
11 changes: 6 additions & 5 deletions storage-node/src/commands/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -234,8 +234,8 @@ Supported values: warn, error, debug, info. Default:debug`,
logger.warn(`Only subset of buckets will process uploads!`)
}

logger.info(`Buckets synced and served: ${selectedBuckets}`)
logger.info(`Buckets accepting uploads: ${writableBuckets}`)
logger.info(`Buckets synced and served: [${selectedBuckets}]`)
logger.info(`Buckets accepting uploads: [${writableBuckets}]`)

if (!flags.tempFolder) {
logger.warn(
Expand Down Expand Up @@ -266,6 +266,10 @@ Supported values: warn, error, debug, info. Default:debug`,
this.error('Paths for pending and uploads folders must be unique.')
}

// initialise storage provider connection: undefined if not enabled
storageProviderConnection = await parseConfigOptionAndBuildConnection()
logger.debug(`remote storage provider connection status: ${isStorageProviderConnectionEnabled()}`)

await createDirectory(flags.uploads)
await loadDataObjectIdCache(flags.uploads)

Expand Down Expand Up @@ -332,9 +336,6 @@ Supported values: warn, error, debug, info. Default:debug`,
logger.warn(`Cleanup service is Disabled.`)
}

// initialise storage provider connection: undefined if not enabled
storageProviderConnection = await parseConfigOptionAndBuildConnection()

try {
const port = flags.port
const maxFileSize = await api.consts.storage.maxDataObjectSize.toNumber()
Expand Down
1 change: 1 addition & 0 deletions storage-node/src/services/caching/localDataObjects.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ export async function loadDataObjectIdCache(uploadDir: string): Promise<void> {
}

if (!isStorageProviderConnectionEnabled()) {
logger.debug(`no storage provider connection enabled for cache loading`)
return
}
const connection = getStorageProviderConnection()!
Expand Down
4 changes: 4 additions & 0 deletions storage-node/src/services/helpers/acceptObject.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import fs from 'fs'
import logger from '../logger'
import { getStorageProviderConnection, isStorageProviderConnectionEnabled } from '../../commands/server'
import { addDataObjectIdToCache } from '../caching/localDataObjects'
import { moveFile } from './moveFile'
Expand All @@ -15,10 +16,13 @@ export async function acceptObject(
throw new Error('Destination path is undefined')
}
await moveFile(src, dest)
logger.info(`File ${filename} accepted on local volume`)
} else {
const connection = getStorageProviderConnection()!
await connection.uploadFileToRemoteBucket(filename, src.toString())
await fsPromises.unlink(src)
logger.info(`File ${filename} accepted to remote storage`)
}
addDataObjectIdToCache(filename)
logger.info(`File ${filename} added to local cache`)
}
35 changes: 26 additions & 9 deletions storage-node/src/services/storageProviders/awsConnectionHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,21 +28,38 @@ export class AwsConnectionHandler implements IConnectionHandler {
private multiPartThresholdGB = 5

constructor(opts: AwsConnectionHandlerParams) {
this.client = new S3Client({
if (process.env.LOCALSTACK_ENABLED === 'true') {
this.client = this.constructWithLocalstack(opts)
} else {
this.client = this.constructProduction(opts)
}
this.bucket = opts.bucketName
logger.info(
`AWS connection handler initialized with bucket config ${
process.env.LOCALSTACK_ENABLED === 'true' ? 'LOCALSTACK' : 'PRODUCTION'
}`
)
}

private constructProduction(opts: AwsConnectionHandlerParams): S3Client {
return new S3Client({
credentials: {
accessKeyId: opts.accessKeyId,
secretAccessKey: opts.secretAccessKey,
},
region: opts.region,
endpoint:
process.env.LOCALSTACK_ENABLED === 'true'
? `http://localhost:${process.env.LOCALSTACK_PORT || 4566}/`
: undefined,
tls: process.env.LOCALSTACK_ENABLED === 'true' ? false : undefined,
forcePathStyle: process.env.LOCALSTACK_ENABLED === 'true',
})
this.bucket = opts.bucketName
logger.info(`AWS connection handler initialized with bucket: ${this.bucket}`)
}
private constructWithLocalstack(opts: AwsConnectionHandlerParams): S3Client {
return new S3Client({
credentials: {
accessKeyId: opts.accessKeyId,
secretAccessKey: opts.secretAccessKey,
},
endpoint: process.env.LOCALSTACK_ENDPOINT!,
tls: false,
forcePathStyle: true,
})
}

private isSuccessfulResponse(response: any): boolean {
Expand Down
2 changes: 2 additions & 0 deletions storage-node/src/services/webApi/controllers/filesApi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ export async function getFile(
try {
pinDataObjectIdToCache(dataObjectId)
} catch (err) {
logger.error(`Error pinning file ${dataObjectId} to cache`)
res.status(404).send()
return
}
Expand Down Expand Up @@ -94,6 +95,7 @@ export async function getFile(
const connection = getStorageProviderConnection()!

const url = await connection.getRedirectUrlForObject(dataObjectId)
logger.info(`Creating presigned url for remote file ${url}`)

// Redirect to the remote file
res.redirect(url)
Expand Down
2 changes: 1 addition & 1 deletion tests/network-tests/run-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ function cleanup() {
docker-compose -f ../../docker-compose.yml down -v
}

trap cleanup EXIT ERR SIGINT SIGTERM
# trap cleanup EXIT ERR SIGINT SIGTERM

export JOYSTREAM_NODE_TAG=${JOYSTREAM_NODE_TAG:-latest}
RUNTIME_PROFILE=TESTING ../../scripts/runtime-code-shasum.sh
Expand Down
12 changes: 6 additions & 6 deletions tests/network-tests/start-storage.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
set -e

source ../../.env
source .env

TMP=$0
THIS_DIR=$(dirname $TMP)
Expand All @@ -15,6 +15,7 @@ export COLOSSUS_1_URL="http://${HOST_IP}:3333"
export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
export COLOSSUS_2_URL="http://${HOST_IP}:3335"
export DISTRIBUTOR_2_URL="http://${HOST_IP}:3336"
export LOCALSTACK_ENDPOINT="http://${HOST_IP}:4566"
$THIS_DIR/run-test-scenario.sh initStorageAndDistribution

# give QN time to catch up so nodes can get their initial state
Expand All @@ -27,11 +28,10 @@ docker-compose -f $THIS_DIR/../../docker-compose.yml up -d colossus-2
docker-compose -f $THIS_DIR/../../docker-compose.yml up -d distributor-2

# Start localstack if ENABLE_LOCALSTACK is set to true
if [ "$LOCALSTACK_ENABLED" == "true" ]; then
docker-compose -f $THIS_DIR/../../docker-compose.yml up -d localstack
awslocal s3api create-bucket --bucket "$AWS_BUCKET_NAME"

fi
# if [ $LOCALSTACK_ENABLED == true ]; then
docker-compose -f $THIS_DIR/../../docker-compose.yml up -d localstack
awslocal s3api create-bucket --bucket $AWS_BUCKET_NAME --endpoint http://localhost:4566
# fi

# allow a few seconds for nodes to startup and display first few log entries
# to help debug tests
Expand Down

0 comments on commit 81ea8fb

Please sign in to comment.