diff --git a/README.md b/README.md
index 0d07ad8..caefc07 100644
--- a/README.md
+++ b/README.md
@@ -40,7 +40,7 @@ Public tester deployment can be found in https://tester.garush.dev
 
 ## Local Development
 
-To start all docker third party service:
+To start all docker third-party services:
 
 ```shell
 docker-compose up -d
@@ -54,15 +54,12 @@ Run the docker services:
 docker-compose up -d
 ```
 
-The first time your run the services, you need to create the minio/s3 access key.
+The first time you run the services, you need to create the minio/s3 access key:
+
+-   Go to the Minio Console's user tab: http://localhost:9001/users (`minioadmin:minioadmin`)
+-   Create a user with access key and secret key `minioadmin1:minioadmin1`. These keys are the backend's defaults.
+-   Give this user all permissions.
-Go to the Minio Console's user tab
-
-http://localhost:9001/users (`minioadmin:minioadmin`)
-
-And create a user with access and key `minioadmin1:minioadmin1`. These keys are the backend's default.
-
-Give this user all the permissions.
-
-TODO: Automatize the minio/s3 token for local development
+TODO: Automate the minio/s3 token creation for local development.
 
 To run the backend:
 
@@ -77,7 +74,7 @@ Try:
 
 -   http://localhost:3000/art
 -   http://localhost:3000/file
 
-The bullmq job would be populating the database and s3 bucket from the symbol and garush networks.
+The `bullmq` job will populate the database and S3 bucket from the Symbol and Garush networks.
 
 ## Services:
diff --git a/packages/backend/src/config/configuration.ts b/packages/backend/src/config/configuration.ts
index 6fa5275..b82c512 100644
--- a/packages/backend/src/config/configuration.ts
+++ b/packages/backend/src/config/configuration.ts
@@ -50,8 +50,7 @@ export interface LoggerConfiguration {
 }
 
 export interface BullMQConfiguration {
-    logLevel: string;
-    logFileName: string;
+    refreshArtJobSchedulerEvery: number;
 }
 
 export interface RedisConfiguration {
@@ -105,8 +104,7 @@
         password: process.env.REDIS_PASSWORD,
     },
     bullmq: {
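+        // Interval in milliseconds between refresh_art job executions; defaults to 60000 (one minute).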
+        refreshArtJobSchedulerEvery: parseInt(process.env.BULLMQ_REFRESH_ART_JOB_SCHEDULER_EVERY) || 60000,
     },
     s3: {
diff --git a/packages/backend/src/main.ts b/packages/backend/src/main.ts
index 4041589..797c356 100644
--- a/packages/backend/src/main.ts
+++ b/packages/backend/src/main.ts
@@ -31,15 +31,22 @@
 async function bootstrap() {
     const app = await NestFactory.create(AppModule);
     const configService = app.get<ConfigService<Configuration>>(ConfigService);
     const loggerConfiguration = configService.get('logger');
-    const serverConfiguration = configService.get('server');
+    const appName = 'Garush';
     const logger = WinstonModule.createLogger({
         transports: [
             new winston.transports.Console({
-                format: winston.format.combine(winston.format.timestamp(), nestWinstonModuleUtilities.format.nestLike()),
+                format: winston.format.combine(
+                    winston.format.errors({ stack: true }),
+                    winston.format.timestamp(),
+                    nestWinstonModuleUtilities.format.nestLike(appName, { prettyPrint: true }),
+                ),
             }),
             new winston.transports.File({
-                format: winston.format.combine(winston.format.timestamp(), nestWinstonModuleUtilities.format.nestLike()),
+                format: winston.format.combine(
+                    winston.format.timestamp(),
+                    nestWinstonModuleUtilities.format.nestLike(appName, { prettyPrint: true }),
+                ),
                 level: loggerConfiguration.logLevel,
                 filename: loggerConfiguration.logFileName,
             }),
@@ -50,10 +57,10 @@
     app.use(morgan('tiny'));
     app.enableCors();
     const options = new DocumentBuilder()
-        .setTitle('Garush')
-        .setDescription('Garush API description')
+        .setTitle(appName)
+        .setDescription(appName + ' API description')
         .setVersion('0.0.1')
-        .addTag('garush')
+        .addTag(appName.toLowerCase())
         .addBearerAuth()
         .build();
     const document = SwaggerModule.createDocument(app, options);
@@ -82,6 +89,7 @@ async function bootstrap() {
         });
     }
 
+    const serverConfiguration = configService.get('server');
     await app.listen(serverConfiguration.port);
     logger.log(`Application is running on: ${await app.getUrl()}`);
 }
diff --git a/packages/backend/src/modules/art/art.job.ts b/packages/backend/src/modules/art/art.job.ts
index fd6086e..9b7ddc3 100644
--- a/packages/backend/src/modules/art/art.job.ts
+++ b/packages/backend/src/modules/art/art.job.ts
@@ -1,14 +1,31 @@
 import { InjectQueue, Process, Processor } from '@nestjs/bull';
-import { Inject, Injectable } from '@nestjs/common';
+import { Inject, Injectable, Logger } from '@nestjs/common';
+import { ConfigService } from '@nestjs/config';
 import { Queue } from 'bull';
 import { Network } from '../../common/network';
+import { BullMQConfiguration, Configuration } from '../../config/configuration';
 import { ArtService } from './art.service';
 
 @Injectable()
 @Processor('garush')
 export class ArtJobService {
-    constructor(@InjectQueue('garush') private queue: Queue, @Inject(ArtService) private readonly artService: ArtService) {
-        queue.add('refresh_art', {}, { repeat: { every: 60000 } });
+    private readonly logger = new Logger(ArtJobService.name);
+
+    constructor(
+        @InjectQueue('garush') private queue: Queue,
+        @Inject(ArtService) private readonly artService: ArtService,
+        private configService: ConfigService<Configuration>,
+    ) {
+        const bullMQConfiguration = configService.get<BullMQConfiguration>('bullmq');
+        const every = bullMQConfiguration.refreshArtJobSchedulerEvery;
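+        // Register the repeatable refresh_art job; "every" is the repeat interval in milliseconds (60000 by default).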
+        queue.add('refresh_art', {}, { repeat: { every: every } }).then(
+            () => {
+                this.logger.log(`refresh_art job running every ${every / 1000} seconds`);
+            },
+            (e) => {
+                this.logger.error(`refresh_art job didn't start. Error: ${e.message || e}`, e);
+            },
+        );
     }
 
     @Process('refresh_art')
diff --git a/packages/backend/src/modules/art/art.service.ts b/packages/backend/src/modules/art/art.service.ts
index 2f6d131..2167bb0 100644
--- a/packages/backend/src/modules/art/art.service.ts
+++ b/packages/backend/src/modules/art/art.service.ts
@@ -3,14 +3,15 @@ import { Inject, Injectable, Logger } from '@nestjs/common';
 import { InjectModel } from '@nestjs/sequelize';
 import { Queue } from 'bull';
 import { Art, SearchService, Utils } from 'garush-storage';
-import { EMPTY, firstValueFrom, from, isObservable, Observable } from 'rxjs';
+import { catchError, count, EMPTY, firstValueFrom, from, isObservable, Observable } from 'rxjs';
 import { mergeMap, tap } from 'rxjs/operators';
 import { BackendUtils } from '../../common/backend.utils';
-import { Network } from '../../common/network';
+import { getName, Network } from '../../common/network';
 import { FileService } from '../file/file.service';
 import { NetworkService } from '../network/network.service';
 import { ArtEntity, ArtEntityAttributes } from './models/art.entity';
 import { ArtRefreshLogEntity } from './models/art.refresh.log.entity';
+
 @Injectable()
 export class ArtService {
     private readonly logger = new Logger(ArtService.name);
@@ -34,18 +35,20 @@
         return this.artEntityModel.findOne({ where: { mosaicId: mosaicId } });
     }
 
-    private async createArt(art: Art, network: Network): Promise<ArtEntity> {
-        // this.refreshStorage(network, art.rootTransactionHash);
+    private async createArt(art: Art, network: Network): Promise<ArtEntity | undefined> {
         if (
             !(await this.artEntityModel.findOne({
                 where: { mosaicId: art.mosaicId.toHex() },
             }))
         ) {
             const file = await this.fileService.refresh(network, art.rootTransactionHash);
+            if (!file) {
+                return undefined;
+            }
             const royalty = parseInt(art.metadata.userData?.royalty?.toString()) || 0;
             const description = art.metadata.userData?.description?.toString() || '';
             if (!file.id) {
-                throw new Error('File.id must defined!!');
+                throw new Error('File.id must be defined!!');
             }
 
             const entity: ArtEntityAttributes = {
@@ -88,70 +91,67 @@
         if (log) {
             return log;
         }
-
         return this.artRefreshLogEntityModel.create({
             network: network,
-            toHeight: BigInt(0),
             fromHeight: BigInt(0),
+            toHeight: BigInt(0),
             total: 0,
         });
     }
 
     async refreshArtGivenNetwork(network: Network) {
-        const repositoryFactory = this.networkService.getRepositoryFactory(network);
-        const searchService = new SearchService(repositoryFactory);
-        const currentLog = await this.getOrCreateCurrentLog(network);
-        const fromHeight: bigint = BackendUtils.toBigInt(currentLog.toHeight) + BigInt(1);
-        const chainInfo = await firstValueFrom(repositoryFactory.createChainRepository().getChainInfo());
-        const height = Utils.toBigInt(chainInfo.height);
-        const maxToHeight = fromHeight + BigInt(100000);
-        const toHeight = height < maxToHeight ? height : maxToHeight;
-        if (toHeight > height) {
-            return;
-        }
-        this.logger.log(`reloading arts ${network}`);
-        const artSubscriber: Observable<Art> = await searchService.search({
-            fromHeight: fromHeight,
-            toHeight: toHeight,
-        });
-
-        if (isObservable(artSubscriber)) {
-            // I have hacked this with "rxjs": "file:../storage/node_modules/rxjs",
-            console.log('Observable :)');
-        } else {
-            console.error('NO OBSERVABLE!!');
-        }
-        let total = 0;
+        try {
+            const repositoryFactory = this.networkService.getRepositoryFactory(network);
+            const searchService = new SearchService(repositoryFactory);
+            const currentLog = await this.getOrCreateCurrentLog(network);
+            const fromHeight: bigint = BackendUtils.toBigInt(currentLog.toHeight) + BigInt(1);
+            const chainInfo = await firstValueFrom(repositoryFactory.createChainRepository().getChainInfo());
+            const height = Utils.toBigInt(chainInfo.height);
+            const maxToHeight = fromHeight + BigInt(10000);
+            const toHeight = height < maxToHeight ? height : maxToHeight;
+            if (toHeight <= fromHeight) {
+                return;
+            }
+            this.logger.log(`Reloading arts from ${getName(network)} network, blocks ${fromHeight} to ${toHeight}`);
+            const artSubscriber: Observable<Art> = await searchService.search({
+                fromHeight: fromHeight,
+                toHeight: toHeight,
+            });
 
-        artSubscriber
-            .pipe(
-                mergeMap((art: Art) => {
-                    const entity = this.createArt(art, network);
-                    console.log(typeof entity);
-                    if (!entity) {
+            if (isObservable(artSubscriber)) {
+                // I have hacked this with "rxjs": "file:../storage/node_modules/rxjs",
+                this.logger.log('Observable :)');
+            } else {
+                this.logger.error('NO OBSERVABLE!!');
+            }
+            const total = await firstValueFrom(
+                artSubscriber.pipe(
+                    mergeMap((art: Art) => {
+                        const entity = this.createArt(art, network);
+                        if (!entity) {
+                            return EMPTY;
+                        }
+                        return from(entity);
+                    }),
+                    catchError((e: any) => {
+                        this.logger.error(`There has been an error refreshing art. ${e.message}`, e);
                         return EMPTY;
-                    }
-                    return from(entity);
-                }),
-                tap(() => {
-                    total++;
-                }),
-            )
-            .subscribe(
-                (entity) => {
-                    if (entity) this.logger.log(`ART with mosaicId ${entity.mosaicId} saved!`);
-                },
-                (e) => {
-                    console.log(e);
-                    this.logger.error(e);
-                },
-                () => {
-                    this.artRefreshLogEntityModel.create({
-                        network: network,
-                        toHeight: toHeight,
-                        fromHeight: fromHeight,
-                        total: total,
-                    });
-                },
+                    }),
+                    tap((entity) => {
+                        this.logger.log(`Art with mosaicId ${entity.mosaicId} saved!`);
+                    }),
+                    count(),
+                ),
             );
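+            // Persist the processed block range; the next refresh resumes from this log's toHeight + 1.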
+            await this.artRefreshLogEntityModel.create({
+                network: network,
+                toHeight: toHeight,
+                fromHeight: fromHeight,
+                total: total,
+            });
+            this.logger.log(`Arts from ${getName(network)} network, blocks ${fromHeight} to ${toHeight}, refreshed! ${total} arts found.`);
+        } catch (e) {
+            this.logger.error(`There has been an error refreshing art for network ${getName(network)}. Error ${e.message}`, e);
+            console.log(e);
+        }
     }
 }
diff --git a/packages/backend/src/modules/art/models/art.refresh.log.entity.ts b/packages/backend/src/modules/art/models/art.refresh.log.entity.ts
index a145d7b..9015337 100644
--- a/packages/backend/src/modules/art/models/art.refresh.log.entity.ts
+++ b/packages/backend/src/modules/art/models/art.refresh.log.entity.ts
@@ -12,6 +12,18 @@ export interface ArtRefreshLogEntityAttributes {
 @Table({
     tableName: 'art_refresh_log_entity',
     timestamps: true,
+    indexes: [
+        {
+            name: 'unique_network_fromHeight',
+            unique: true,
+            fields: ['network', 'fromHeight'],
+        },
+        {
+            name: 'unique_network_toHeight',
+            unique: true,
+            fields: ['network', 'toHeight'],
+        },
+    ],
 })
 export class ArtRefreshLogEntity extends Model {
     @Column({
@@ -21,10 +33,10 @@ export class ArtRefreshLogEntity extends Model {
diff --git a/packages/backend/src/modules/file/file.service.ts b/packages/backend/src/modules/file/file.service.ts
-        const service = this.getStorage(network);
-        let metadata = await this.refreshS3File(network, rootTransactionHash);
-        if (!metadata) {
-            metadata = await service.loadMetadataFromHash(rootTransactionHash);
-        }
-        let file = await this.fileEntityModel.findOne({
-            where: { rootTransactionHash: rootTransactionHash },
-        });
-        if (!file) {
-            const creationHeight = Utils.toBigInt(metadata.rootTransaction.transactionInfo!.height!);
-            file = await this.fileEntityModel.create({
-                network,
-                name: metadata.name,
-                header: metadata.header,
-                size: metadata.size,
-                mime: metadata.mime,
-                parser: metadata.parser,
-                version: metadata.version,
-                creationHeight: creationHeight.toString(),
-                rootTransactionHash,
+    async refresh(network: Network, rootTransactionHash: string): Promise {
+        try {
+            const metadata = await this.refreshS3File(network, rootTransactionHash);
+            if (!metadata) {
+                return undefined;
+            }
+            let file = await this.fileEntityModel.findOne({
+                where: { rootTransactionHash: rootTransactionHash },
             });
+            if (!file) {
+                const creationHeight = Utils.toBigInt(metadata.rootTransaction.transactionInfo!.height!);
+                file = await this.fileEntityModel.create({
+                    network,
+                    name: metadata.name,
+                    header: metadata.header,
+                    size: metadata.size,
+                    mime: metadata.mime,
+                    parser: metadata.parser,
+                    version: metadata.version,
+                    creationHeight: creationHeight.toString(),
+                    rootTransactionHash,
+                });
+            }
+            return file;
+        } catch (e) {
+            this.logger.warn(`File with hash ${rootTransactionHash} cannot be loaded from network ${network}. Error ${e}`, e);
+            return undefined;
         }
-        return file;
     }
 
     async refreshS3File(network: Network, rootTransactionHash: string): Promise {
@@ -92,24 +96,27 @@
             Bucket: s3.bucket,
             Key: key,
         });
-
         try {
             await client.send(getObjectCommand);
             this.logger.log(`Object with key ${key} does exist!`);
-            return undefined;
+            return await service.loadMetadataFromHash(rootTransactionHash);
         } catch (e) {
-            this.logger.error(e);
-            this.logger.log(`Object with key ${key} does not exist! Storing!`);
-            const file = await service.loadFileFromHash(rootTransactionHash);
-            this.logger.log(file.content.length);
-            const command = new PutObjectCommand({
-                Bucket: s3.bucket,
-                ContentType: file.metadata.mime,
-                Body: file.content,
-                Key: key,
-            });
-            await client.send(command);
-            return file.metadata;
+            if (e.name === 'NoSuchKey') {
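+                // The object is not in the bucket yet: load the file through the storage service and upload it to S3.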
+                this.logger.log(`Object with key ${key} does NOT exist! Storing!`);
+                const file = await service.loadFileFromHash(rootTransactionHash);
+                this.logger.log(`Storing file size ${file.content.length}`);
+                const command = new PutObjectCommand({
+                    Bucket: s3.bucket,
+                    ContentType: file.metadata.mime,
+                    Body: file.content,
+                    Key: key,
+                });
+                await client.send(command);
+                return file.metadata;
+            } else {
+                this.logger.error(`Unknown error loading S3 Object. ${e.message}`, e);
+                return undefined;
+            }
         } finally {
             client.destroy();
         }