@@ -1,11 +1,13 @@
 import { JsonCompatible } from '@vendure/common/lib/shared-types';
 
+import { CacheTtlProvider, DefaultCacheTtlProvider } from '../../cache/cache-ttl-provider';
 import { Injector } from '../../common/index';
 import { ConfigService, Logger } from '../../config/index';
 import { CacheStrategy, SetCacheKeyOptions } from '../../config/system/cache-strategy';
 import { TransactionalConnection } from '../../connection/index';
 
 import { CacheItem } from './cache-item.entity';
+import { CacheTag } from './cache-tag.entity';
 
 /**
  * A {@link CacheStrategy} that stores the cache in memory using a simple
@@ -18,11 +20,13 @@ import { CacheItem } from './cache-item.entity';
  */
 export class SqlCacheStrategy implements CacheStrategy {
     protected cacheSize = 10_000;
+    protected ttlProvider: CacheTtlProvider;
 
-    constructor(config?: { cacheSize?: number }) {
+    constructor(config?: { cacheSize?: number; cacheTtlProvider?: CacheTtlProvider }) {
         if (config?.cacheSize) {
             this.cacheSize = config.cacheSize;
         }
+        this.ttlProvider = config?.cacheTtlProvider || new DefaultCacheTtlProvider();
     }
 
     protected connection: TransactionalConnection;
@@ -41,8 +45,7 @@ export class SqlCacheStrategy implements CacheStrategy {
         });
 
         if (hit) {
-            const now = new Date().getTime();
-            if (!hit.expiresAt || (hit.expiresAt && now < hit.expiresAt.getTime())) {
+            if (!hit.expiresAt || (hit.expiresAt && this.ttlProvider.getTime() < hit.expiresAt.getTime())) {
                 try {
                     return JSON.parse(hit.value);
                 } catch (e: any) {
@@ -58,15 +61,15 @@ export class SqlCacheStrategy implements CacheStrategy {
 
     async set<T extends JsonCompatible<T>>(key: string, value: T, options?: SetCacheKeyOptions) {
         const cacheSize = await this.connection.rawConnection.getRepository(CacheItem).count();
-        if (cacheSize > this.cacheSize) {
+        if (cacheSize >= this.cacheSize) {
             // evict oldest
             const subQuery1 = this.connection.rawConnection
                 .getRepository(CacheItem)
                 .createQueryBuilder('item')
                 .select('item.id', 'item_id')
-                .orderBy('item.updatedAt', 'DESC')
+                .orderBy('item.insertedAt', 'DESC')
                 .limit(1000)
-                .offset(this.cacheSize);
+                .offset(Math.max(this.cacheSize - 1, 1));
             const subQuery2 = this.connection.rawConnection
                 .createQueryBuilder()
                 .select('t.item_id')
@@ -81,17 +84,34 @@ export class SqlCacheStrategy implements CacheStrategy {
             try {
                 await qb.execute();
             } catch (e: any) {
-                Logger.error(`An error occured when attempting to prune the cache: ${e.message as string}`);
+                Logger.error(`An error occurred when attempting to prune the cache: ${e.message as string}`);
             }
         }
-        await this.connection.rawConnection.getRepository(CacheItem).upsert(
+        const item = await this.connection.rawConnection.getRepository(CacheItem).upsert(
             new CacheItem({
                 key,
+                insertedAt: new Date(),
                 value: JSON.stringify(value),
-                expiresAt: options?.ttl ? new Date(new Date().getTime() + options.ttl) : undefined,
+                expiresAt: options?.ttl ? new Date(this.ttlProvider.getTime() + options.ttl) : undefined,
             }),
             ['key'],
         );
+
+        if (options?.tags) {
+            for (const tag of options.tags) {
+                try {
+                    await this.connection.rawConnection.getRepository(CacheTag).upsert(
+                        {
+                            tag,
+                            item: item.identifiers[0],
+                        } as any,
+                        ['tag', 'itemId'],
+                    );
+                } catch (e: any) {
+                    Logger.error(`Error inserting tag: ${e.message as string}`);
+                }
+            }
+        }
     }
 
     async delete(key: string) {
@@ -99,4 +119,38 @@ export class SqlCacheStrategy implements CacheStrategy {
             key,
         });
     }
+
+    async invalidateTags(tags: string[]) {
+        await this.connection.withTransaction(async ctx => {
+            const itemIds = await this.connection
+                .getRepository(ctx, CacheTag)
+                .createQueryBuilder('cache_tag')
+                .select('cache_tag.itemId')
+                .where('cache_tag.tag IN (:...tags)', { tags })
+                .groupBy('cache_tag.itemId')
+                .addGroupBy('cache_tag.id')
+                .getMany();
+
+            await this.connection
+                .getRepository(ctx, CacheTag)
+                .createQueryBuilder('cache_tag')
+                .delete()
+                .where('cache_tag.tag IN (:...tags)', { tags })
+                .execute();
+
+            if (itemIds.length) {
+                const ids = itemIds.map(i => i.itemId);
+                const batchSize = 1000;
+
+                for (let i = 0; i < itemIds.length; i += batchSize) {
+                    const batch = ids.slice(i, i + batchSize);
+                    try {
+                        await this.connection.getRepository(ctx, CacheItem).delete(batch);
+                    } catch (e: any) {
+                        Logger.error(`Error deleting cache items: ${e.message as string}`);
+                    }
+                }
+            }
+        });
+    }
 }
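
A minimal usage sketch of the API introduced above, assuming that CacheTtlProvider only requires a getTime(): number method and that the strategy has already been initialised with its TransactionalConnection (the init() step is outside this diff). The ManualClockTtlProvider class and the key/tag names are hypothetical, purely for illustration.

import { CacheTtlProvider } from '../../cache/cache-ttl-provider';
import { SqlCacheStrategy } from './sql-cache-strategy';

// Hypothetical TTL provider with a manually controlled clock (useful in tests).
// Assumes the CacheTtlProvider interface only requires getTime().
class ManualClockTtlProvider implements CacheTtlProvider {
    private now = Date.now();
    getTime(): number {
        return this.now;
    }
    advance(ms: number): void {
        this.now += ms;
    }
}

const ttlProvider = new ManualClockTtlProvider();
const strategy = new SqlCacheStrategy({ cacheSize: 5_000, cacheTtlProvider: ttlProvider });

async function example() {
    // Entries can carry a TTL (in milliseconds) and any number of tags.
    await strategy.set('product:1', { name: 'Widget' }, { ttl: 60_000, tags: ['products'] });

    // Advancing the injected clock past the TTL means subsequent reads treat the entry as expired.
    ttlProvider.advance(61_000);

    // Alternatively, every entry sharing a tag can be invalidated in one call.
    await strategy.invalidateTags(['products']);
}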