Merge branch 'pr/qixing-jk/3117'

This commit is contained in:
tangly1024
2025-01-05 19:30:29 +08:00
5 changed files with 120 additions and 63 deletions

View File

@@ -1,46 +1,7 @@
import BLOG from '@/blog.config'
import FileCache from './local_file_cache'
import MemoryCache from './memory_cache'
// Whether to force caching on in the Vercel environment. In a serverless
// runtime both existing cache flavors (file and in-memory) are essentially
// meaningless and a pure waste of resources, so caching is only considered
// enabled during `npm run build` / `npm run export` or outside production.
const enableCacheInVercel =
process.env.npm_lifecycle_event === 'build' ||
process.env.npm_lifecycle_event === 'export' ||
!BLOG['isProd']
/**
 * Read-through cache lookup using the default cache TTL.
 *
 * Convenience wrapper that delegates to getOrSetDataWithCustomCache with
 * no custom cache time (null), so the backend's default expiry applies.
 *
 * @param {string} key - cache key
 * @param {Function} getDataFunction - async producer invoked on cache miss
 * @param {...*} getDataArgs - arguments forwarded to getDataFunction
 * @returns {Promise<*|null>} cached or freshly-fetched data, null when absent
 */
export async function getOrSetDataWithCache(key, getDataFunction, ...getDataArgs) {
  return getOrSetDataWithCustomCache(key, null, getDataFunction, ...getDataArgs);
}
/**
 * Read-through cache lookup with a caller-supplied TTL.
 *
 * Tries the cache first; on a miss, invokes the producer and stores a
 * truthy result back into the cache before returning it.
 *
 * @param {string} key - cache key
 * @param {number|null} customCacheTime - TTL override passed to the cache layer
 * @param {Function} getDataFunction - async producer invoked on cache miss
 * @param {...*} getDataArgs - arguments forwarded to getDataFunction
 * @returns {Promise<*|null>} the data, or null when the producer returned falsy
 */
export async function getOrSetDataWithCustomCache(key, customCacheTime, getDataFunction, ...getDataArgs) {
  // Cache hit: return immediately without touching the producer.
  const cached = await getDataFromCache(key);
  if (cached) {
    console.log('[缓存-->>API]:', key);
    return cached;
  }
  // Cache miss: fetch fresh data, then write it back (only when truthy).
  const fresh = await getDataFunction(...getDataArgs);
  if (fresh) {
    console.log('[API-->>缓存]:', key);
    await setDataToCache(key, fresh, customCacheTime);
  }
  return fresh || null;
}
import RedisCache from './redis_cache'
/**
* 为减少频繁接口请求notion数据将被缓存
@@ -49,7 +10,7 @@ export async function getOrSetDataWithCustomCache(key, customCacheTime, getDataF
*/
export async function getDataFromCache(key, force) {
if (enableCacheInVercel || BLOG.ENABLE_CACHE || force) {
const dataFromCache = await cacheApi.getCache(key)
const dataFromCache = await getApi().getCache(key)
if (!dataFromCache || JSON.stringify(dataFromCache) === '[]') {
return null
}
@@ -65,15 +26,26 @@ export async function setDataToCache(key, data, customCacheTime) {
return
}
// console.trace('[API-->>缓存写入]:', key)
await cacheApi.setCache(key, data, customCacheTime)
await getApi().setCache(key, data, customCacheTime)
}
/**
 * Delete a cached entry from the active cache backend.
 *
 * @param {string} key - cache key to remove
 * @returns {Promise<void>}
 */
export async function delCacheData(key) {
  // BLOG.ENABLE_CACHE may be a boolean or the strings 'true'/'false'.
  // The previous JSON.parse(BLOG.ENABLE_CACHE) threw a SyntaxError when the
  // value was undefined or an empty string, so normalize it defensively.
  const cacheEnabled =
    typeof BLOG.ENABLE_CACHE === 'string'
      ? BLOG.ENABLE_CACHE === 'true'
      : Boolean(BLOG.ENABLE_CACHE)
  if (!cacheEnabled) {
    return
  }
  await getApi().delCache(key)
}
// 缓存实现类
const cacheApi = process.env.ENABLE_FILE_CACHE ? FileCache : MemoryCache
/**
 * Resolve the active cache backend implementation.
 *
 * Priority: Redis (when BLOG.REDIS_URL is configured) → file cache
 * (when the ENABLE_FILE_CACHE env var is set) → in-memory cache.
 *
 * @returns {{getCache: Function, setCache: Function, delCache: Function}}
 */
function getApi() {
  if (BLOG.REDIS_URL) {
    return RedisCache
  }
  return process.env.ENABLE_FILE_CACHE ? FileCache : MemoryCache
}

40
lib/cache/redis_cache.js vendored Normal file
View File

@@ -0,0 +1,40 @@
import Redis from 'ioredis'
import BLOG from '@/blog.config'
import { siteConfig } from '@/lib/config'
// Shared ioredis client connected to the URL from blog config.
export const redisClient = new Redis(BLOG.REDIS_URL)
// Default TTL in seconds: 1.5x the Next.js revalidation interval — presumably
// so Redis entries outlive one revalidation cycle (TODO confirm intent).
// Math.trunc keeps the value an integer, as required by the Redis EX option.
const cacheTime = Math.trunc(
  siteConfig('NEXT_REVALIDATE_SECOND', BLOG.NEXT_REVALIDATE_SECOND) * 1.5
)
/**
 * Read and deserialize a value from Redis.
 *
 * @param {string} key - cache key
 * @returns {Promise<*|null>} parsed value, or null when the key is missing
 *   or the read/parse fails (fail-soft: errors are treated as a cache miss)
 */
export async function getCache(key) {
  try {
    const data = await redisClient.get(key)
    return data ? JSON.parse(data) : null
  } catch (e) {
    console.error('redisClient读取失败 ' + e)
    // Previously fell through returning undefined; return null explicitly to
    // match the Promise<*|null> contract of the cache layer.
    return null
  }
}
/**
 * Serialize and store a value in Redis with an expiry.
 *
 * @param {string} key - cache key
 * @param {*} data - value to store (JSON-serialized)
 * @param {number} [customCacheTime] - TTL in seconds; falls back to the
 *   module-level default when falsy
 * @returns {Promise<void>} write failures are logged and swallowed
 */
export async function setCache(key, data, customCacheTime) {
  const ttl = customCacheTime || cacheTime
  try {
    const payload = JSON.stringify(data)
    await redisClient.set(key, payload, 'EX', ttl)
  } catch (e) {
    console.error('redisClient写入失败 ' + e)
  }
}
/**
 * Delete a key from Redis.
 *
 * @param {string} key - cache key to remove
 * @returns {Promise<void>} delete failures are logged and swallowed
 */
export async function delCache(key) {
  try {
    await redisClient.del(key)
  } catch (e) {
    console.error('redisClient删除失败 ' + e)
  }
}
export default { getCache, setCache, delCache }