Optimize cache code

Branch: dependabot/npm_and_yarn/fir_admin/url-parse-1.5.10
MMXX 3 years ago
parent 09257733f8
commit 2b5325ff32
45 changed files (changed lines in parentheses):

  1. fir_ser/admin/views/app.py (2)
  2. fir_ser/admin/views/domain.py (2)
  3. fir_ser/admin/views/order.py (2)
  4. fir_ser/admin/views/report.py (2)
  5. fir_ser/admin/views/storage.py (4)
  6. fir_ser/admin/views/supersign.py (2)
  7. fir_ser/admin/views/user.py (2)
  8. fir_ser/api/base_views.py (3)
  9. fir_ser/api/models.py (2)
  10. fir_ser/api/tasks.py (2)
  11. fir_ser/api/utils/TokenManager.py (21)
  12. fir_ser/api/utils/app/apputils.py (5)
  13. fir_ser/api/utils/app/iossignapi.py (4)
  14. fir_ser/api/utils/app/supersignutils.py (68)
  15. fir_ser/api/utils/auth.py (18)
  16. fir_ser/api/utils/crontab/iproxy.py (27)
  17. fir_ser/api/utils/daobase.py (2)
  18. fir_ser/api/utils/decorators.py (16)
  19. fir_ser/api/utils/modelutils.py (2)
  20. fir_ser/api/utils/mp/wechat.py (12)
  21. fir_ser/api/utils/serializer.py (5)
  22. fir_ser/api/utils/storage/aliyunApi.py (8)
  23. fir_ser/api/utils/storage/caches.py (272)
  24. fir_ser/api/utils/storage/localApi.py (4)
  25. fir_ser/api/utils/storage/qiniuApi.py (4)
  26. fir_ser/api/utils/storage/storage.py (58)
  27. fir_ser/api/utils/tempcaches.py (29)
  28. fir_ser/api/utils/utils.py (32)
  29. fir_ser/api/views/apps.py (4)
  30. fir_ser/api/views/domain.py (3)
  31. fir_ser/api/views/download.py (4)
  32. fir_ser/api/views/login.py (2)
  33. fir_ser/api/views/logout.py (4)
  34. fir_ser/api/views/order.py (3)
  35. fir_ser/api/views/receiveudids.py (2)
  36. fir_ser/api/views/report.py (4)
  37. fir_ser/api/views/storage.py (5)
  38. fir_ser/api/views/supersign.py (6)
  39. fir_ser/api/views/uploads.py (5)
  40. fir_ser/common/base/baseutils.py (18)
  41. fir_ser/common/base/magic.py (65)
  42. fir_ser/common/cache/invalid.py (118)
  43. fir_ser/common/cache/state.py (52)
  44. fir_ser/common/cache/storage.py (205)
  45. fir_ser/fir_ser/settings.py (2)

fir_ser/admin/views/app.py
@@ -14,10 +14,10 @@ from api.base_views import app_delete
 from api.models import AppReleaseInfo, Apps
 from api.utils.TokenManager import verify_token
 from api.utils.auth import AdminTokenAuthentication
-from api.utils.baseutils import get_dict_from_filter_fields
 from api.utils.response import BaseResponse
 from api.utils.serializer import AdminAppsSerializer, AdminAppReleaseSerializer
 from api.utils.storage.caches import del_cache_response_by_short, get_download_url_by_cache
+from common.base.baseutils import get_dict_from_filter_fields
 logger = logging.getLogger(__name__)

fir_ser/admin/views/domain.py
@@ -12,10 +12,10 @@ from rest_framework.views import APIView
 from api.models import UserDomainInfo
 from api.utils.auth import AdminTokenAuthentication
-from api.utils.baseutils import get_dict_from_filter_fields
 from api.utils.response import BaseResponse
 from api.utils.serializer import AdminDomainNameSerializer
 from api.utils.storage.caches import reset_app_wx_easy_type
+from common.base.baseutils import get_dict_from_filter_fields
 logger = logging.getLogger(__name__)

fir_ser/admin/views/order.py
@@ -12,10 +12,10 @@ from rest_framework.views import APIView
 from api.models import UserInfo, Order
 from api.utils.auth import AdminTokenAuthentication
-from api.utils.baseutils import get_dict_from_filter_fields
 from api.utils.response import BaseResponse
 from api.utils.serializer import AdminOrdersSerializer
 from api.utils.storage.caches import update_order_info, admin_change_user_download_times
+from common.base.baseutils import get_dict_from_filter_fields
 logger = logging.getLogger(__name__)

fir_ser/admin/views/report.py
@@ -12,9 +12,9 @@ from rest_framework.views import APIView
 from api.models import AppReportInfo
 from api.utils.auth import AdminTokenAuthentication
-from api.utils.baseutils import get_dict_from_filter_fields
 from api.utils.response import BaseResponse
 from api.utils.serializer import AdminAppReportSerializer
+from common.base.baseutils import get_dict_from_filter_fields
 logger = logging.getLogger(__name__)

fir_ser/admin/views/storage.py
@@ -13,10 +13,10 @@ from rest_framework.views import APIView
 from api.base_views import storage_change
 from api.models import UserInfo, AppStorage
 from api.utils.auth import AdminTokenAuthentication
-from api.utils.baseutils import format_storage_selection
-from api.utils.baseutils import get_dict_from_filter_fields
 from api.utils.response import BaseResponse
 from api.utils.serializer import AdminStorageSerializer
+from common.base.baseutils import format_storage_selection
+from common.base.baseutils import get_dict_from_filter_fields
 logger = logging.getLogger(__name__)

fir_ser/admin/views/supersign.py
@@ -12,11 +12,11 @@ from rest_framework.views import APIView
 from api.models import APPSuperSignUsedInfo, AppIOSDeveloperInfo, IosDeveloperPublicPoolBill
 from api.utils.auth import AdminTokenAuthentication
-from api.utils.baseutils import get_dict_from_filter_fields, get_real_ip_address, get_order_num
 from api.utils.modelutils import get_user_public_used_sign_num, get_user_public_sign_num, get_user_obj_from_epu
 from api.utils.response import BaseResponse
 from api.utils.serializer import AdminDeveloperSerializer, AdminSuperSignUsedSerializer, AdminBillInfoSerializer
 from api.utils.utils import get_developer_devices
+from common.base.baseutils import get_dict_from_filter_fields, get_real_ip_address, get_order_num
 logger = logging.getLogger(__name__)

fir_ser/admin/views/user.py
@@ -12,10 +12,10 @@ from rest_framework.views import APIView
 from api.models import UserInfo, UserCertificationInfo, ThirdWeChatUserInfo
 from api.utils.auth import AdminTokenAuthentication
-from api.utils.baseutils import get_dict_from_filter_fields
 from api.utils.response import BaseResponse
 from api.utils.serializer import AdminUserInfoSerializer, AdminUserCertificationSerializer, AdminThirdWxSerializer
 from api.utils.storage.caches import auth_user_download_times_gift
+from common.base.baseutils import get_dict_from_filter_fields
 from fir_ser.settings import AUTH_USER_GIVE_DOWNLOAD_TIMES
 logger = logging.getLogger(__name__)

fir_ser/api/base_views.py
@@ -10,10 +10,11 @@ from api.models import AppReleaseInfo, APPToDeveloper, UserInfo, AppScreenShot,
 from api.utils.app.supersignutils import IosUtils
 from api.utils.response import BaseResponse
 from api.utils.storage.caches import del_cache_response_by_short, del_cache_by_delete_app, \
-    del_cache_storage, MigrateStorageState
+    del_cache_storage
 from api.utils.storage.storage import Storage
 from api.utils.utils import delete_local_files, delete_app_screenshots_files, change_storage_and_change_head_img, \
     migrating_storage_data, clean_storage_data, check_storage_is_new_storage
+from common.cache.state import MigrateStorageState
 logger = logging.getLogger(__name__)

fir_ser/api/models.py
@@ -4,9 +4,9 @@ from django.db import models
 from django.db.models import Count
 from api.utils.TokenManager import generate_alphanumeric_token_of_length, generate_numeric_token_of_length
-from api.utils.baseutils import make_random_uuid
 ######################################## 用户表 ########################################
 from api.utils.daobase import AESCharField
+from common.base.baseutils import make_random_uuid
 # Create your models here.

fir_ser/api/tasks.py
@@ -16,9 +16,9 @@ from api.utils.crontab.ctasks import sync_download_times, auto_clean_upload_tmp_
 from api.utils.crontab.iproxy import get_best_proxy_ips, clean_ip_proxy_infos
 from api.utils.geetest.geetest_utils import check_bypass_status
 from api.utils.mp.wechat import sync_wx_access_token
-from api.utils.storage.caches import MigrateStorageState
 from api.utils.storage.storage import get_local_storage
 from api.views.login import get_login_type
+from common.cache.state import MigrateStorageState
 from fir_ser.celery import app
 from fir_ser.settings import LOGIN, CHANGER, REGISTER

fir_ser/api/utils/TokenManager.py
@@ -9,16 +9,14 @@ import string
 import time
 import uuid
-from django.core.cache import cache
-from fir_ser.settings import CACHE_KEY_TEMPLATE
+from common.cache.storage import TokenManagerCache, RedisCacheBase
 logger = logging.getLogger(__name__)
 def make_token(release_id, time_limit=60, key='', force_new=False):
-    token_key = "_".join([key.lower(), CACHE_KEY_TEMPLATE.get("make_token_key"), release_id])
-    token = cache.get(token_key)
+    token_cache = TokenManagerCache(key, release_id)
+    token_key, token = token_cache.get_storage_key_and_cache()
     if token and not force_new:
         logger.debug(
             f"make_token cache exists get token:{token} release_id:{release_id} force_new:{force_new} token_key:{token_key}")
@@ -28,11 +26,15 @@ def make_token(release_id, time_limit=60, key='', force_new=False):
     user_ran_str = uuid.uuid5(uuid.NAMESPACE_DNS, release_id).__str__().split("-")
     user_ran_str.extend(random_str)
     token = "".join(user_ran_str)
-    cache.set(token, {
+    token_cache.set_storage_cache({
+        "atime": time.time() + time_limit,
+        "data": release_id
+    }, time_limit)
+    RedisCacheBase(token).set_storage_cache({
         "atime": time.time() + time_limit,
         "data": release_id
     }, time_limit)
-    cache.set(token_key, token, time_limit - 1)
+    token_cache.set_storage_cache(token, time_limit - 1)
     logger.debug(
         f"make_token cache not exists get token:{token} release_id:{release_id} force_new:{force_new} token_key:{token_key}")
     return token
@@ -40,11 +42,12 @@ def make_token(release_id, time_limit=60, key='', force_new=False):
 def verify_token(token, release_id, success_once=False):
     try:
-        values = cache.get(token)
+        token_cache = RedisCacheBase(token)
+        token, values = token_cache.get_storage_key_and_cache()
         if values and release_id == values.get("data", None):
             logger.debug(f"verify_token token:{token} release_id:{release_id} success")
             if success_once:
-                cache.delete(token)
+                token_cache.del_storage_cache()
             return True
     except Exception as e:
         logger.error(f"verify_token token:{token} release_id:{release_id} failed Exception:{e}")
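Note: TokenManagerCache and RedisCacheBase come from fir_ser/common/cache/storage.py, which this commit adds but which is not shown in this excerpt. A minimal sketch of the interface these hunks rely on follows; the key layout is copied from the old hand-built token_key, but the method bodies are assumptions, not the actual implementation.

# Sketch only: assumed shape of the wrappers used by make_token/verify_token above.
from django.core.cache import cache

from fir_ser.settings import CACHE_KEY_TEMPLATE


class RedisCacheBase(object):
    def __init__(self, cache_key):
        self.cache_key = cache_key

    def get_storage_cache(self):
        return cache.get(self.cache_key)

    def get_storage_key_and_cache(self):
        # returns the resolved key together with its current cached value
        return self.cache_key, cache.get(self.cache_key)

    def set_storage_cache(self, value, timeout=600):
        return cache.set(self.cache_key, value, timeout)

    def del_storage_cache(self):
        return cache.delete(self.cache_key)


class TokenManagerCache(RedisCacheBase):
    def __init__(self, key, release_id):
        # same "<key>_<make_token_key>_<release_id>" layout the old code joined by hand
        super().__init__("_".join([key.lower(), CACHE_KEY_TEMPLATE.get("make_token_key"), release_id]))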

fir_ser/api/utils/app/apputils.py
@@ -7,10 +7,11 @@ import logging
 import random
 from api.models import AppReleaseInfo, Apps
-from api.utils.baseutils import make_app_uuid
 from api.utils.modelutils import get_user_domain_name
-from api.utils.storage.caches import del_cache_response_by_short, MigrateStorageState
+from api.utils.storage.caches import del_cache_response_by_short
 from api.utils.storage.storage import Storage
+from common.base.baseutils import make_app_uuid
+from common.cache.state import MigrateStorageState
 logger = logging.getLogger(__name__)

fir_ser/api/utils/app/iossignapi.py
@@ -14,8 +14,8 @@ from OpenSSL.crypto import (load_pkcs12, dump_certificate_request, dump_privatek
 from api.utils.app.shellcmds import shell_command, use_user_pass
 from api.utils.apple.appleapiv3 import AppStoreConnectApi
-from api.utils.baseutils import get_format_time, format_apple_date, make_app_uuid
-from api.utils.storage.caches import CleanErrorBundleIdSignDataState
+from common.base.baseutils import get_format_time, format_apple_date, make_app_uuid
+from common.cache.state import CleanErrorBundleIdSignDataState
 from fir_ser.settings import SUPER_SIGN_ROOT
 logger = logging.getLogger(__name__)

fir_ser/api/utils/app/supersignutils.py
@@ -18,18 +18,19 @@ from django.db.models import Count, F
 from api.models import APPSuperSignUsedInfo, AppUDID, AppIOSDeveloperInfo, AppReleaseInfo, Apps, APPToDeveloper, \
     UDIDsyncDeveloper, DeveloperAppID, DeveloperDevicesID, IosDeveloperPublicPoolBill, UserInfo, AppleDeveloperToAppUse
 from api.utils.app.iossignapi import ResignApp, AppDeveloperApiV2
-from api.utils.baseutils import file_format_path, delete_app_profile_file, get_profile_full_path, format_apple_date, \
-    get_format_time, make_app_uuid, make_from_user_uuid
 from api.utils.modelutils import get_ios_developer_public_num, check_ipa_is_latest_sign, \
     get_developer_can_used_from_public_sign, update_or_create_developer_udid_info
 from api.utils.response import BaseResponse
 from api.utils.serializer import BillAppInfoSerializer, BillDeveloperInfoSerializer
 from api.utils.storage.caches import del_cache_response_by_short, send_msg_over_limit, check_app_permission, \
-    consume_user_download_times_by_app_obj, add_udid_cache_queue, get_and_clean_udid_cache_queue, \
-    CleanErrorBundleIdSignDataState
+    consume_user_download_times_by_app_obj, add_udid_cache_queue, get_and_clean_udid_cache_queue
 from api.utils.storage.storage import Storage
 from api.utils.utils import delete_app_to_dev_and_file, send_ios_developer_active_status, delete_local_files, \
     download_files_form_oss, get_developer_udided
+from common.base.baseutils import file_format_path, delete_app_profile_file, get_profile_full_path, format_apple_date, \
+    get_format_time, make_app_uuid, make_from_user_uuid
+from common.base.magic import run_function_by_locker, call_function_try_attempts
+from common.cache.state import CleanErrorBundleIdSignDataState
 from fir_ser.settings import SUPER_SIGN_ROOT, MEDIA_ROOT, MOBILE_CONFIG_SIGN_SSL, MSGTEMPLATE
 logger = logging.getLogger(__name__)
@@ -66,7 +67,7 @@ def resign_by_app_id_and_developer(app_id, developer_id, developer_app_id, need_
         status = True
     if status:
         locker = {
-            'locker_key': "%s_%s_%s" % ('run_sign', app_obj.app_id, developer_obj.issuer_id),
+            'locker_key': f"run_sign_{app_obj.app_id}_{developer_obj.issuer_id}",
             "timeout": 60 * 5}
         status, result = IosUtils.run_sign(app_obj.user_id, app_obj, developer_obj, d_time, [], locker=locker)
     return status, {'developer_id': developer_obj.issuer_id, 'result': (status, result)}
@@ -219,57 +220,6 @@ def disable_developer_and_send_email(app_obj, developer_obj):
             developer_obj.issuer_id))
-def call_function_try_attempts(try_attempts=3, sleep_time=2, failed_callback=None):
-    def decorator(func):
-        @wraps(func)
-        def wrapper(*args, **kwargs):
-            res = False, {}
-            start_time = time.time()
-            for i in range(try_attempts):
-                res = func(*args, **kwargs)
-                status, result = res
-                if status:
-                    return res
-                else:
-                    logger.warning(f'exec {func} failed. {try_attempts} times in total. now {sleep_time} later try '
-                                   f'again...{i}')
-                    time.sleep(sleep_time)
-            if not res[0]:
-                logger.error(f'exec {func} failed after the maximum number of attempts. Failed:{res[1]}')
-                if failed_callback:
-                    failed_callback()
-            logger.info(f"exec {func} finished. time:{time.time() - start_time}")
-            return res
-        return wrapper
-    return decorator
-def run_function_by_lock(lock_key='', timeout=60 * 5):
-    def decorator(func):
-        @wraps(func)
-        def wrapper(*args, **kwargs):
-            start_time = time.time()
-            locker = kwargs.get('locker', {})
-            if locker:
-                kwargs.pop('locker')
-            new_lock_key = locker.get('lock_key', lock_key)
-            new_timeout = locker.get('timeout', timeout)
-            if locker and new_timeout and new_lock_key:
-                with cache.lock(new_lock_key, timeout=new_timeout):
-                    logger.info(f"exec {func} start. time:{time.time()}")
-                    res = func(*args, **kwargs)
-            else:
-                res = func(*args, **kwargs)
-            logger.info(f"exec {func} finished. time:{time.time() - start_time}")
-            return res
-        return wrapper
-    return decorator
 def get_new_developer_by_app_obj(user_objs, app_obj, apple_to_app=False):
     can_used_developer_pk_list = []
     obj_base_filter = AppIOSDeveloperInfo.objects.filter(user_id__in=user_objs, is_actived=True, certid__isnull=False)
@@ -578,7 +528,7 @@ class IosUtils(object):
             sign_flag = True
         if sign_flag:
             locker = {
-                'locker_key': "%s_%s_%s" % ('run_sign', self.app_obj.app_id, self.developer_obj.issuer_id),
+                'locker_key': f"run_sign_{self.app_obj.app_id}_{self.developer_obj.issuer_id}",
                 "timeout": 60 * 5}
             logger.info("start run_sign ...")
             return IosUtils.run_sign(self.user_obj, self.app_obj, self.developer_obj,
@@ -817,7 +767,7 @@ class IosUtils(object):
         if 'continue' in [str(did_udid_result), str(download_profile_result)]:
             return True, True
         locker = {
-            'locker_key': "%s_%s_%s" % ('run_sign', self.app_obj.app_id, self.developer_obj.issuer_id),
+            'locker_key': f"run_sign_{self.app_obj.app_id}_{self.developer_obj.issuer_id}",
             "timeout": 60 * 5}
         return IosUtils.run_sign(self.user_obj, self.app_obj, self.developer_obj,
                                  start_time,
@@ -831,7 +781,7 @@ class IosUtils(object):
         return True, True
     @staticmethod
-    @run_function_by_lock()
+    @run_function_by_locker()
     def run_sign(user_obj, app_obj, developer_obj, d_time, udid_list=None):
         if udid_list is None:
             udid_list = []
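Note: the retry and lock decorators deleted above are now imported at the top of this file from fir_ser/common/base/magic.py (65 new lines, not shown), with run_function_by_lock renamed to run_function_by_locker. A usage sketch follows, assuming the relocated decorator keeps the call convention of the removed code: it pops the optional locker kwarg and serializes the wrapped call with cache.lock(). The decorated function and its arguments are illustrative placeholders, not project code.

# Sketch only: how callers such as run_sign hand lock parameters to the decorator.
from common.base.magic import run_function_by_locker


@run_function_by_locker()
def resign_app_sketch(app_id, issuer_id):
    # illustrative body; real signing work would happen here
    return True, {"app_id": app_id, "issuer_id": issuer_id}


# The locker kwarg is consumed by run_function_by_locker, not by the function itself;
# concurrent calls sharing the same locker_key are expected to run one at a time.
status, result = resign_app_sketch(
    "com.example.demo", "fake-issuer-id",
    locker={'locker_key': "run_sign_com.example.demo_fake-issuer-id", "timeout": 60 * 5})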

fir_ser/api/utils/auth.py
@@ -5,14 +5,13 @@
 # date: 2020/9/24
 import base64
-from django.core.cache import cache
 from django.http.cookie import parse_cookie
 from rest_framework.authentication import BaseAuthentication
 from rest_framework.exceptions import AuthenticationFailed
 from rest_framework.permissions import BasePermission
 from api.models import UserInfo
-from fir_ser.settings import CACHE_KEY_TEMPLATE
+from common.cache.storage import UserTokenCache
 def get_cookie_token(request):
@@ -52,20 +51,19 @@ def get_user_from_request_auth(request):
     if not auth_token:
         raise AuthenticationFailed({"code": 1001, "error": "缺少token"})
-    auth_key = "_".join([CACHE_KEY_TEMPLATE.get('user_auth_token_key'), auth_token])
-    cacheuserinfo = cache.get(auth_key)
-    if not cacheuserinfo:
+    auth_cache = UserTokenCache(auth_token)
+    userinfo = auth_cache.get_storage_cache()
+    if not userinfo:
         raise AuthenticationFailed({"code": 1001, "error": "无效的token"})
-    if user_name != cacheuserinfo.get('username', None):
+    if user_name != userinfo.get('username', None):
         raise AuthenticationFailed({"code": 1001, "error": "token校验失败"})
-    user_obj = UserInfo.objects.filter(uid=cacheuserinfo.get('uid', None),
-                                       username=cacheuserinfo.get("username")).first()
+    user_obj = UserInfo.objects.filter(uid=userinfo.get('uid', None),
+                                       username=userinfo.get("username")).first()
     if not user_obj:
         raise AuthenticationFailed({"code": 1001, "error": "无效的token"})
     if user_obj.is_active:
-        cache.set(auth_key, cacheuserinfo, 3600 * 24 * 7)
+        auth_cache.set_storage_cache(userinfo, 3600 * 24 * 7)
         return user_obj, auth_token
     else:
         raise AuthenticationFailed({"code": 1001, "error": "用户被禁用"})

fir_ser/api/utils/crontab/iproxy.py
@@ -9,9 +9,9 @@ import time
 from concurrent.futures import ThreadPoolExecutor
 import requests
-from django.core.cache import cache
-from fir_ser.settings import CACHE_KEY_TEMPLATE, APPLE_DEVELOPER_API_PROXY_LIST, APPLE_DEVELOPER_API_PROXY
+from common.cache.storage import IpProxyListCache, IpProxyActiveCache
+from fir_ser.settings import APPLE_DEVELOPER_API_PROXY_LIST, APPLE_DEVELOPER_API_PROXY
 logger = logging.getLogger(__name__)
@@ -38,21 +38,20 @@ def get_best_proxy_ips(url='https://api.appstoreconnect.apple.com/agreement'):
         pools.submit(task, proxy_ip)
     pools.shutdown()
     best_sorted_ips = sorted(access_ip_info, key=lambda x: x.get('time'))[:8]
-    ip_proxy_store_key = CACHE_KEY_TEMPLATE.get("ip_proxy_store_list_key")
     best_sorted_ips = [ip_proxy['ip'] for ip_proxy in best_sorted_ips]
-    cache.set(ip_proxy_store_key, best_sorted_ips, 24 * 60 * 60)
+    IpProxyListCache().set_storage_cache(best_sorted_ips, 24 * 60 * 60)
     return best_sorted_ips
 def get_proxy_ip_from_cache(change_ip=False):
-    ip_proxy_store_key = CACHE_KEY_TEMPLATE.get("ip_proxy_store_list_key")
-    ip_proxy_store_active_key = CACHE_KEY_TEMPLATE.get("ip_proxy_store_active_key")
-    active_ip_proxy = cache.get(ip_proxy_store_active_key)
+    active_proxy_cache = IpProxyActiveCache()
+    active_ip_proxy = active_proxy_cache.get_storage_cache()
     if not change_ip and active_ip_proxy:
         logger.info(f"get ip proxy cache {active_ip_proxy}")
         return active_ip_proxy
-    ip_proxy_result = cache.get(ip_proxy_store_key)
+    list_proxy_cache = IpProxyListCache()
+    ip_proxy_result = list_proxy_cache.get_storage_cache()
     if not ip_proxy_result:
         ip_proxy_result = get_best_proxy_ips()
@@ -62,8 +61,8 @@ def get_proxy_ip_from_cache(change_ip=False):
         except Exception as e:
             logger.warning(f'remove bad ip proxy failed {e}')
         logger.error(f"remove bad ip proxy {active_ip_proxy}")
-        cache.delete(ip_proxy_store_active_key)
-    cache.set(ip_proxy_store_key, ip_proxy_result, 24 * 60 * 60)
+        active_proxy_cache.del_storage_cache()
+    list_proxy_cache.set_storage_cache(ip_proxy_result, 24 * 60 * 60)
     if ip_proxy_result and len(ip_proxy_result) > 0:
         proxy_ip = ip_proxy_result[random.randint(0, 2 if len(ip_proxy_result) > 2 else len(ip_proxy_result) - 1)]
@@ -74,13 +73,11 @@ def get_proxy_ip_from_cache(change_ip=False):
     else:
         proxy_info = APPLE_DEVELOPER_API_PROXY
     logger.info(f"make ip proxy cache {proxy_info}")
-    cache.set(ip_proxy_store_active_key, proxy_info, 24 * 60 * 60)
+    active_proxy_cache.set_storage_cache(proxy_info, 24 * 60 * 60)
     return proxy_info
 def clean_ip_proxy_infos():
     logger.info("clean ip proxy infos")
-    ip_proxy_store_key = CACHE_KEY_TEMPLATE.get("ip_proxy_store_list_key")
-    ip_proxy_store_active_key = CACHE_KEY_TEMPLATE.get("ip_proxy_store_active_key")
-    cache.delete(ip_proxy_store_key)
-    cache.delete(ip_proxy_store_active_key)
+    IpProxyListCache().del_storage_cache()
+    IpProxyActiveCache().del_storage_cache()

fir_ser/api/utils/daobase.py
@@ -6,7 +6,7 @@
 from django.conf import settings
 from django.db import models
-from api.utils.baseutils import AESCipher
+from common.base.baseutils import AESCipher
 class AESCharField(models.CharField):

fir_ser/api/utils/decorators.py
@@ -2,12 +2,26 @@ from functools import wraps, WRAPPER_ASSIGNMENTS
 from django.http.response import HttpResponse
+from common.cache.storage import AppDownloadShortShowCache
 def get_cache(alias):
     from django.core.cache import caches
     return caches[alias]
+def set_short_show_cache(short, cache_key):
+    short_show_cache = AppDownloadShortShowCache("ShortDownloadView".lower(), short)
+    key_list = short_show_cache.get_storage_cache()
+    if key_list and isinstance(key_list, list):
+        key_list.append(cache_key)
+        key_list = list(set(key_list))
+    else:
+        key_list = [cache_key]
+    short_show_cache.set_storage_cache(key_list, 600)
 class CacheResponse:
     """
     Store/Receive and return cached `HttpResponse` based on DRF response.
@@ -91,6 +105,8 @@ class CacheResponse:
                 response.status_code,
                 headers
            )
+            short = kwargs.get("short", '')
+            set_short_show_cache(short, key)
            self.cache.set(key, response_triple, timeout)
         else:
             # build smaller Django HttpResponse
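Note: set_short_show_cache records every response cache key stored for a given short URL in an AppDownloadShortShowCache list (600 s TTL), so cached download pages can later be invalidated without the pattern scan the removed del_short_cache performed. The consumer of that list lives in fir_ser/common/cache/invalid.py (added by this commit, not shown); the following is only a hypothetical sketch of what it might do.

# Hypothetical sketch; invalid_short_cache in common/cache/invalid.py is assumed to
# look roughly like this, based on what set_short_show_cache stores above.
from django.core.cache import cache

from common.cache.storage import AppDownloadShortShowCache


def invalid_short_cache_sketch(app_obj):
    if not app_obj:
        return
    short_show_cache = AppDownloadShortShowCache("ShortDownloadView".lower(), app_obj.short)
    for cache_key in short_show_cache.get_storage_cache() or []:
        cache.delete(cache_key)  # drop each cached HttpResponse recorded for this short
    short_show_cache.del_storage_cache()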

fir_ser/api/utils/modelutils.py
@@ -13,7 +13,7 @@ from django.db.models import Count, Sum, Q
 from api.models import AppReleaseInfo, UserDomainInfo, DomainCnameInfo, UserAdDisplayInfo, RemoteClientInfo, \
     AppIOSDeveloperInfo, IosDeveloperPublicPoolBill, APPToDeveloper, UserInfo, UDIDsyncDeveloper
-from api.utils.baseutils import get_server_domain_from_request, get_user_default_domain_name, get_real_ip_address, \
+from common.base.baseutils import get_server_domain_from_request, get_user_default_domain_name, get_real_ip_address, \
     get_origin_domain_name, is_valid_phone
 logger = logging.getLogger(__name__)

fir_ser/api/utils/mp/wechat.py
@@ -8,10 +8,10 @@ import logging
 from hashlib import sha1
 import requests
-from django.core.cache import cache
-from api.utils.baseutils import get_format_time
 from api.utils.mp.utils import WxMsgCryptBase
+from common.base.baseutils import get_format_time
+from common.cache.storage import WxTokenCache
 from fir_ser.settings import THIRDLOGINCONF, CACHE_KEY_TEMPLATE
 logger = logging.getLogger(__name__)
@@ -29,15 +29,15 @@ def format_req_json(j_data, func, *args, **kwargs):
 def sync_wx_access_token(force=False):
-    wx_access_token_key = CACHE_KEY_TEMPLATE.get("wx_access_token_key")
-    access_token_info = cache.get(wx_access_token_key)
+    wx_cache = WxTokenCache()
+    access_token_info = wx_cache.get_storage_cache()
     if not access_token_info or force:
         access_token_info = WxOfficialBase.make_wx_auth_obj().get_access_token()
         if access_token_info.get('errcode', -1) in [40013] or 'invalid appid' in access_token_info.get('errmsg', ''):
             return False, access_token_info
         expires_in = access_token_info.get('expires_in')
         if expires_in:
-            cache.set(wx_access_token_key, access_token_info, expires_in - 60)
+            wx_cache.set_storage_cache(access_token_info, expires_in - 60)
     return True, access_token_info
@@ -45,7 +45,7 @@ def get_wx_access_token_cache(c_count=1, ):
     if c_count > 5:
         return ''
     wx_access_token_key = CACHE_KEY_TEMPLATE.get("wx_access_token_key")
-    access_token = cache.get(wx_access_token_key)
+    access_token = WxTokenCache().get_storage_cache()
     if access_token:
         return access_token.get('access_token')
     status, result = sync_wx_access_token(True)

fir_ser/api/utils/serializer.py
@@ -10,7 +10,9 @@ from api.utils.app.apputils import bytes2human
 from api.utils.modelutils import get_user_domain_name, get_app_domain_name, get_app_download_uri
 from api.utils.storage.caches import get_user_free_download_times, get_user_cert_auth_status
 from api.utils.storage.storage import Storage
-from api.utils.utils import get_developer_udided, get_choices_dict, get_choices_name_from_key
+from api.utils.utils import get_developer_udided
+from common.base.baseutils import get_choices_dict, get_choices_name_from_key
+from common.cache.storage import AdPicShowCache
 logger = logging.getLogger(__name__)
@@ -808,6 +810,7 @@ class AppAdInfoSerializer(UserAdInfoSerializer):
     ad_pic = serializers.SerializerMethodField()
     def get_ad_pic(self, obj):
+        AdPicShowCache(self.context.get("key", ''), self.context.get("short", '')).set_storage_cache(obj.ad_pic)
         return get_download_url_from_context(self, obj, '', obj.ad_pic, True)

fir_ser/api/utils/storage/aliyunApi.py
@@ -49,9 +49,7 @@ def md5sum(src):
 class AliYunCdn(object):
     def __init__(self, key, is_https, domain_name):
-        uri = 'http://'
-        if is_https:
-            uri = 'https://'
+        uri = 'https://' if is_https else 'http://'
         self.domain = uri + domain_name
         self.key = key
@@ -172,9 +170,7 @@ class AliYunOss(object):
         return self.fetch_sts_token(name, expires, only_put=True).__dict__
     def make_auth_bucket(self, name, expires, only_get=False):
-        uri = 'http://'
-        if self.is_https:
-            uri = 'https://'
+        uri = 'https://' if self.is_https else 'http://'
         url = self.endpoint
         is_cname = False
         if self.domain_name and self.download_auth_type == 1:

fir_ser/api/utils/storage/caches.py
@@ -14,10 +14,15 @@ from django.utils import timezone
 from api.models import Apps, UserInfo, AppReleaseInfo, AppUDID, APPToDeveloper, APPSuperSignUsedInfo, \
     UserCertificationInfo, Order
-from api.utils.baseutils import check_app_password, get_order_num, get_real_ip_address
 from api.utils.modelutils import get_app_d_count_by_app_id, get_app_domain_name, get_user_domain_name, \
     add_remote_info_from_request
 from api.utils.storage.storage import Storage, LocalStorage
+from common.base.baseutils import check_app_password, get_order_num, get_real_ip_address
+from common.cache.invalid import invalid_app_cache, invalid_short_cache, invalid_app_download_times_cache, \
+    invalid_head_img_cache
+from common.cache.storage import AppDownloadTodayTimesCache, AppDownloadTimesCache, DownloadUrlCache, AppInstanceCache, \
+    UploadTmpFileNameCache, RedisCacheBase, UserCanDownloadCache, UserFreeDownloadTimesCache, WxTicketCache, \
+    SignUdidQueueCache, CloudStorageCache
 from fir_ser.settings import CACHE_KEY_TEMPLATE, SERVER_DOMAIN, SYNC_CACHE_TO_DATABASE, DEFAULT_MOBILEPROVISION, \
     USER_FREE_DOWNLOAD_TIMES, AUTH_USER_FREE_DOWNLOAD_TIMES
@@ -25,12 +30,7 @@ logger = logging.getLogger(__name__)
 def sync_download_times_by_app_id(app_ids):
-    app_id_lists = []
-    for app_id in app_ids:
-        down_tem_key = "_".join([CACHE_KEY_TEMPLATE.get("download_times_key"), app_id.get("app_id")])
-        app_id_lists.append(down_tem_key)
-    down_times_lists = cache.get_many(app_id_lists)
-    for k, v in down_times_lists.items():
+    for k, v in AppDownloadTimesCache(app_ids).get_many().items():
         app_id = k.split(CACHE_KEY_TEMPLATE.get("download_times_key"))[1].strip('_')
         Apps.objects.filter(app_id=app_id).update(count_hits=v)
         logger.info(f"sync_download_times_by_app_id app_id:{app_id} count_hits:{v}")
@@ -88,8 +88,7 @@ def get_download_url_by_cache(app_obj, filename, limit, isdownload=True, key='',
         mobileconifg = local_storage.get_download_url(filename.split(".")[0] + "." + "mobileprovision", limit)
         return local_storage.get_download_url(filename.split(".")[0] + "." + download_url_type, limit), mobileconifg
-    down_key = "_".join([key.lower(), CACHE_KEY_TEMPLATE.get('download_url_key'), filename])
-    download_val = cache.get(down_key)
+    download_val = DownloadUrlCache(key, filename).get_storage_cache()
     if download_val:
         if download_val.get("time") > now - 60:
             return download_val.get("download_url"), ""
@@ -109,15 +108,15 @@ def get_app_instance_by_cache(app_id, password, limit, udid):
         if not check_app_password(app_password, password):
             return None
         return app_info
-    app_key = "_".join([CACHE_KEY_TEMPLATE.get("app_instance_key"), app_id])
-    app_obj_cache = cache.get(app_key)
+    app_instance_cache = AppInstanceCache(app_id)
+    app_obj_cache = app_instance_cache.get_storage_cache()
     if not app_obj_cache:
         app_obj_cache = Apps.objects.filter(app_id=app_id).values("pk", 'user_id', 'type', 'password',
                                                                   'issupersign',
                                                                   'user_id__certification__status').first()
         if app_obj_cache:
             app_obj_cache['d_count'] = get_app_d_count_by_app_id(app_id)
-            cache.set(app_key, app_obj_cache, limit)
+            app_instance_cache.set_storage_cache(app_obj_cache, limit)
     if not app_obj_cache:
         return None
     app_password = app_obj_cache.get("password")
@@ -129,79 +128,71 @@ def get_app_instance_by_cache(app_id, password, limit, udid):
 def set_app_download_by_cache(app_id, limit=900):
-    down_tem_key = "_".join([CACHE_KEY_TEMPLATE.get("download_times_key"), app_id])
-    download_times = cache.get(down_tem_key)
+    app_download_cache = AppDownloadTimesCache(app_id)
+    download_times = app_download_cache.get_storage_cache()
     if not download_times:
         download_times = Apps.objects.filter(app_id=app_id).values("count_hits").first().get('count_hits')
-        cache.set(down_tem_key, download_times + 1, limit)
+        app_download_cache.set_storage_cache(download_times + 1, limit)
     else:
-        cache.incr(down_tem_key)
-        cache.expire(down_tem_key, timeout=limit)
+        app_download_cache.incr()
+        app_download_cache.expire(limit)
     set_app_today_download_times(app_id)
     return download_times + 1
 def del_cache_response_by_short(app_id, udid=''):
-    apps_dict = Apps.objects.filter(app_id=app_id).values("id", "short", "app_id", "has_combo").first()
-    if apps_dict:
-        del_cache_response_by_short_util(apps_dict.get("short"), apps_dict.get("app_id"), udid)
-        if apps_dict.get("has_combo"):
-            combo_dict = Apps.objects.filter(pk=apps_dict.get("has_combo")).values("id", "short", "app_id").first()
-            if combo_dict:
-                del_cache_response_by_short_util(combo_dict.get("short"), combo_dict.get("app_id"), udid)
-def del_short_cache(short):
-    key = "_".join([CACHE_KEY_TEMPLATE.get("download_short_key"), short, '*'])
-    for app_download_key in cache.iter_keys(key):
-        cache.delete(app_download_key)
-def del_make_token_key_cache(release_id):
-    key = "_".join(['', CACHE_KEY_TEMPLATE.get("make_token_key"), f"{release_id}*"])
-    for make_token_key in cache.iter_keys(key):
-        cache.delete(make_token_key)
-def del_cache_response_by_short_util(short, app_id, udid):
-    logger.info(f"del_cache_response_by_short short:{short} app_id:{app_id} udid:{udid}")
-    del_short_cache(short)
-    cache.delete("_".join([CACHE_KEY_TEMPLATE.get("app_instance_key"), app_id]))
-    key = 'ShortDownloadView'.lower()
-    master_release_dict = AppReleaseInfo.objects.filter(app_id__app_id=app_id, is_master=True).values('icon_url',
-                                                                                                      'release_id').first()
-    if master_release_dict:
-        download_val = CACHE_KEY_TEMPLATE.get('download_url_key')
-        cache.delete("_".join([key, download_val, os.path.basename(master_release_dict.get("icon_url")), udid]))
-        cache.delete("_".join([key, download_val, master_release_dict.get('release_id'), udid]))
-        cache.delete(
-            "_".join([key, CACHE_KEY_TEMPLATE.get("make_token_key"), master_release_dict.get('release_id'), udid]))
-        del_make_token_key_cache(master_release_dict.get('release_id'))
+    app_obj = Apps.objects.filter(app_id=app_id).first()
+    invalid_app_cache(app_obj)
+    invalid_app_cache(app_obj.has_combo)
+    # apps_dict = Apps.objects.filter(app_id=app_id).values("id", "short", "app_id", "has_combo").first()
+    # if apps_dict:
+    #     del_cache_response_by_short_util(apps_dict.get("short"), apps_dict.get("app_id"), udid)
+    #     if apps_dict.get("has_combo"):
+    #         combo_dict = Apps.objects.filter(pk=apps_dict.get("has_combo")).values("id", "short", "app_id").first()
+    #         if combo_dict:
+    #             del_cache_response_by_short_util(combo_dict.get("short"), combo_dict.get("app_id"), udid)
+# def del_short_cache(short):
+#     key = "_".join([CACHE_KEY_TEMPLATE.get("download_short_key"), short, '*'])
+#     for app_download_key in cache.iter_keys(key):
+#         cache.delete(app_download_key)
+# def del_make_token_key_cache(release_id):
+#     key = "_".join(['', CACHE_KEY_TEMPLATE.get("make_token_key"), f"{release_id}*"])
+#     for make_token_key in cache.iter_keys(key):
+#         cache.delete(make_token_key)
+# def del_cache_response_by_short_util(short, app_id, udid):
+#     logger.info(f"del_cache_response_by_short short:{short} app_id:{app_id} udid:{udid}")
+#     del_short_cache(short)
+#
+#     cache.delete("_".join([CACHE_KEY_TEMPLATE.get("app_instance_key"), app_id]))
+#
+#     key = 'ShortDownloadView'.lower()
+#     master_release_dict = AppReleaseInfo.objects.filter(app_id__app_id=app_id, is_master=True).values('icon_url',
+#                                                                                                       'release_id').first()
+#     if master_release_dict:
+#         download_val = CACHE_KEY_TEMPLATE.get('download_url_key')
+#         cache.delete("_".join([key, download_val, os.path.basename(master_release_dict.get("icon_url")), udid]))
+#         cache.delete("_".join([key, download_val, master_release_dict.get('release_id'), udid]))
+#         cache.delete(
+#             "_".join([key, CACHE_KEY_TEMPLATE.get("make_token_key"), master_release_dict.get('release_id'), udid]))
+#         del_make_token_key_cache(master_release_dict.get('release_id'))
 def del_cache_by_delete_app(app_id):
-    now = timezone.now()
-    down_tem_key = "_".join([CACHE_KEY_TEMPLATE.get("download_today_times_key"),
-                             str(now.year), str(now.month), str(now.day), app_id])
-    cache.delete(down_tem_key)
-    cache.delete("_".join([CACHE_KEY_TEMPLATE.get("download_times_key"), app_id]))
-    cache.delete("_".join([CACHE_KEY_TEMPLATE.get("app_instance_key"), app_id]))
+    invalid_app_download_times_cache(app_id)
+    app_obj = Apps.objects.filter(app_id=app_id).first()
+    invalid_app_cache(app_obj)
+    invalid_app_cache(app_obj.has_combo)
 def del_cache_by_app_id(app_id, user_obj):
-    key = ''
-    master_release_dict = AppReleaseInfo.objects.filter(app_id__app_id=app_id, is_master=True).values('icon_url',
-                                                                                                      'release_id').first()
-    download_val = CACHE_KEY_TEMPLATE.get('download_url_key')
-    cache.delete("_".join([key, download_val, os.path.basename(master_release_dict.get("icon_url"))]))
-    cache.delete("_".join([key, download_val, master_release_dict.get('release_id')]))
-    cache.delete(
-        "_".join([key.lower(), CACHE_KEY_TEMPLATE.get("make_token_key"), master_release_dict.get('release_id')]))
-    cache.delete("_".join([key, download_val, user_obj.head_img]))
+    del_cache_by_delete_app(app_id)
+    invalid_head_img_cache(user_obj)
 def del_cache_storage(user_obj):
@@ -209,74 +200,60 @@ def del_cache_storage(user_obj):
     for app_obj in Apps.objects.filter(user_id=user_obj):
         del_cache_response_by_short(app_obj.app_id)
         del_cache_by_app_id(app_obj.app_id, user_obj)
-    storage_keys = "_".join([CACHE_KEY_TEMPLATE.get('user_storage_key'), user_obj.uid, '*'])
-    for storage_key in cache.iter_keys(storage_keys):
-        cache.delete(storage_key)
-    download_val = CACHE_KEY_TEMPLATE.get('download_url_key')
-    cache.delete("_".join(['', download_val, os.path.basename(user_obj.head_img)]))
+    CloudStorageCache('*', user_obj.uid).del_many()
+    invalid_head_img_cache(user_obj)
 def set_app_today_download_times(app_id):
-    now = timezone.now()
-    down_tem_key = "_".join([CACHE_KEY_TEMPLATE.get("download_today_times_key"),
-                             str(now.year), str(now.month), str(now.day), app_id])
-    if cache.get(down_tem_key):
-        cache.incr(down_tem_key)
+    cache_obj = AppDownloadTodayTimesCache(app_id)
+    if cache_obj.get_storage_cache():
+        cache_obj.incr()
     else:
-        cache.set(down_tem_key, 1, 3600 * 24)
+        cache_obj.set_storage_cache(1, 3600 * 24)
 def get_app_today_download_times(app_ids):
     sync_download_times_by_app_id(app_ids)
-    now = timezone.now()
-    app_id_lists = []
     download_times_count = 0
-    for app_id in app_ids:
-        down_tem_key = "_".join([CACHE_KEY_TEMPLATE.get("download_today_times_key"),
-                                 str(now.year), str(now.month), str(now.day), app_id.get("app_id")])
-        app_id_lists.append(down_tem_key)
-    down_times_lists = cache.get_many(app_id_lists)
-    for k, v in down_times_lists.items():
+    for k, v in AppDownloadTodayTimesCache(app_ids).get_many().items():
         download_times_count += v
     return download_times_count
 def upload_file_tmp_name(act, filename, user_obj_id):
-    tmp_key = "_".join([CACHE_KEY_TEMPLATE.get("upload_file_tmp_name_key"), filename])
+    cache_obj = UploadTmpFileNameCache(filename)
     if act == "set":
-        cache.delete(tmp_key)
-        cache.set(tmp_key, {'u_time': time.time(), 'id': user_obj_id, "filename": filename}, 2 * 60 * 60)
+        cache_obj.del_storage_cache()
+        cache_obj.set_storage_cache({'u_time': time.time(), 'id': user_obj_id, "filename": filename}, 2 * 60 * 60)
     elif act == "get":
-        return cache.get(tmp_key)
+        return cache_obj.get_storage_cache()
     elif act == "del":
-        cache.delete(tmp_key)
+        cache_obj.del_storage_cache()
 def limit_cache_util(act, cache_key, cache_limit_times):
     (limit_times, cache_times) = cache_limit_times
+    cache_obj = RedisCacheBase(cache_key)
     if act == "set":
         data = {
             "count": 1,
             "time": time.time()
         }
-        cdata = cache.get(cache_key)
+        cdata = cache_obj.get_storage_cache()
         if cdata:
             data["count"] = cdata["count"] + 1
             data["time"] = time.time()
         logger.info(f"limit_cache_util cache_key:{cache_key} data:{data}")
-        cache.set(cache_key, data, cache_times)
+        cache_obj.set_storage_cache(data, cache_times)
     elif act == "get":
-        cdata = cache.get(cache_key)
+        cdata = cache_obj.get_storage_cache()
         if cdata:
             if cdata["count"] > limit_times:
                 logger.error(f"limit_cache_util cache_key {cache_key} over limit ,is locked . cdata:{cdata}")
                 return False
         return True
     elif act == "del":
-        cache.delete(cache_key)
+        cache_obj.del_storage_cache()
 def login_auth_failed(act, email):
@@ -292,11 +269,11 @@ def send_msg_over_limit(act, email):
 def reset_short_response_cache(user_obj, app_obj=None):
     if app_obj is None:
-        app_obj_short_list = [x[0] for x in Apps.objects.filter(user_id=user_obj).values_list('short').all() if x]
+        app_obj_short_list = Apps.objects.filter(user_id=user_obj).all()
     else:
-        app_obj_short_list = [app_obj.short]
-    for short in app_obj_short_list:
-        del_short_cache(short)
+        app_obj_short_list = [app_obj]
+    for app_obj in app_obj_short_list:
+        invalid_short_cache(app_obj)
 def reset_app_wx_easy_type(user_obj, app_obj=None):
@@ -310,7 +287,7 @@ def reset_app_wx_easy_type(user_obj, app_obj=None):
         if not get_app_domain_name(app_obj):
             app_obj.wxeasytype = True
             app_obj.save(update_fields=['wxeasytype'])
-        del_short_cache(app_obj.short)
+        invalid_short_cache(app_obj)
@@ -326,34 +303,30 @@ def check_user_can_download(user_id):
 def set_user_download_times_flag(user_id, act):
-    user_can_download_key = "_".join(
-        [CACHE_KEY_TEMPLATE.get("user_can_download_key"), str(user_id)])
+    cache_obj = UserCanDownloadCache(user_id)
     if act == 2:
-        result = cache.get(user_can_download_key)
+        result = cache_obj.get_storage_cache()
         if result is None:
             return True
         return result
-    return cache.set(user_can_download_key, act, 3600 * 24)
+    return cache_obj.set_storage_cache(act, 3600 * 24)
 def get_user_free_download_times(user_id, act='get', amount=1, auth_status=False):
     free_download_times = USER_FREE_DOWNLOAD_TIMES
     if auth_status:
         free_download_times = AUTH_USER_FREE_DOWNLOAD_TIMES
-    now = timezone.now()
-    user_free_download_times_key = "_".join(
-        [CACHE_KEY_TEMPLATE.get("user_free_download_times_key"), str(now.year), str(now.month), str(now.day),
-         str(user_id)])
-    user_free_download_times = cache.get(user_free_download_times_key)
+    cache_obj = UserFreeDownloadTimesCache(user_id)
+    user_free_download_times = cache_obj.get_storage_cache()
     if user_free_download_times is not None:
         if act == 'set':
-            return cache.incr(user_free_download_times_key, -amount)
+            return cache_obj.incr(-amount)
         else:
            return user_free_download_times
     else:
-        cache.set(user_free_download_times_key, free_download_times, 3600 * 24)
+        cache_obj.set_storage_cache(free_download_times, 3600 * 24)
         if act == 'set':
-            return cache.incr(user_free_download_times_key, -amount)
+            return cache_obj.incr(-amount)
         else:
            return free_download_times
@@ -500,60 +473,17 @@ def check_app_permission(app_obj, res, user_obj=None):
 def set_wx_ticket_login_info_cache(ticket, data=None, expire_seconds=600):
     if data is None:
         data = {}
-    wx_ticket_info_key = CACHE_KEY_TEMPLATE.get("wx_ticket_info_key")
-    cache.set("_".join([wx_ticket_info_key, ticket]), data, expire_seconds)
+    WxTicketCache(ticket).set_storage_cache(data, expire_seconds)
 def get_wx_ticket_login_info_cache(ticket):
-    wx_ticket_info_key = CACHE_KEY_TEMPLATE.get("wx_ticket_info_key")
-    wx_t_key = "_".join([wx_ticket_info_key, ticket])
-    wx_ticket_info = cache.get(wx_t_key)
+    cache_obj = WxTicketCache(ticket)
+    wx_ticket_info = cache_obj.get_storage_cache()
     if wx_ticket_info:
-        cache.delete(wx_t_key)
+        cache_obj.del_storage_cache()
     return wx_ticket_info
-class CacheBaseState(object):
-    def __init__(self, key, value=time.time(), timeout=3600 * 24):
-        self.key = f"CacheBaseState_{self.__class__.__name__}_{key}"
-        self.value = value
-        self.timeout = timeout
-        self.active = False
-    def get_state(self):
-        return cache.get(self.key)
-    def __enter__(self):
-        if cache.get(self.key):
-            return False
-        else:
-            cache.set(self.key, self.value, self.timeout)
-            self.active = True
-            return True
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if self.active:
-            cache.delete(self.key)
-        logger.info(f"cache base state __exit__ {exc_type}, {exc_val}, {exc_tb}")
-class MigrateStorageState(CacheBaseState):
-    ...
-class CleanSignDataState(CacheBaseState):
-    ...
-class CleanAppSignDataState(CacheBaseState):
-    ...
-class CleanErrorBundleIdSignDataState(CacheBaseState):
-    ...
 def add_download_times_free_base(user_obj, amount, payment_name, description, order_type=1):
     order_number = get_order_num()
     order_obj = Order.objects.create(payment_type=2, order_number=order_number, payment_number=order_number,
@ -584,24 +514,26 @@ def auth_user_download_times_gift(user_obj, amount=200):
def add_udid_cache_queue(prefix_key, values): def add_udid_cache_queue(prefix_key, values):
prefix_key = f"{CACHE_KEY_TEMPLATE.get('ipa_sign_udid_queue_key')}_{prefix_key}" prefix_key = f"{CACHE_KEY_TEMPLATE.get('ipa_sign_udid_queue_key')}_{prefix_key}"
with cache.lock("%s_%s" % ('add_udid_cache_queue', prefix_key), timeout=10): with cache.lock("%s_%s" % ('add_udid_cache_queue', prefix_key), timeout=10):
data = cache.get(prefix_key) cache_obj = SignUdidQueueCache(prefix_key)
data = cache_obj.get_storage_cache()
if data and isinstance(data, list): if data and isinstance(data, list):
data.append(values) data.append(values)
else: else:
data = [values] data = [values]
cache.set(prefix_key, list(set(data)), 60 * 60) cache_obj.set_storage_cache(list(set(data)), 60 * 60)
return data return data
def get_and_clean_udid_cache_queue(prefix_key): def get_and_clean_udid_cache_queue(prefix_key):
prefix_key = f"{CACHE_KEY_TEMPLATE.get('ipa_sign_udid_queue_key')}_{prefix_key}" prefix_key = f"{CACHE_KEY_TEMPLATE.get('ipa_sign_udid_queue_key')}_{prefix_key}"
with cache.lock("%s_%s" % ('add_udid_cache_queue', prefix_key), timeout=10): with cache.lock("%s_%s" % ('add_udid_cache_queue', prefix_key), timeout=10):
data = cache.get(prefix_key) cache_obj = SignUdidQueueCache(prefix_key)
data = cache_obj.get_storage_cache()
if data and isinstance(data, list): if data and isinstance(data, list):
... ...
else: else:
data = [] data = []
cache.delete(prefix_key) cache_obj.del_storage_cache()
return data return data
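A minimal usage sketch (not part of the diff) of how these two queue helpers are expected to pair up, assuming the SignUdidQueueCache wrapper added in common/cache/storage.py further down; the developer key and udid values are made up for illustration:

from api.utils.storage.caches import add_udid_cache_queue, get_and_clean_udid_cache_queue

issuer_key = "demo-developer-issuer-id"  # hypothetical prefix_key
# producer side: several requests enqueue udids under the same developer key
add_udid_cache_queue(issuer_key, "00008030-000A11112222002E")
add_udid_cache_queue(issuer_key, "00008030-000B33334444003F")
# consumer side: drain the deduplicated queue and clear it in one locked step
for udid in get_and_clean_udid_cache_queue(issuer_key):
    print(udid)  # e.g. hand each udid to the signing task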

@ -28,9 +28,7 @@ class LocalStorage(object):
return make_token(name, expires) return make_token(name, expires)
def get_base_url(self): def get_base_url(self):
uri = 'http://' uri = 'https://' if self.is_https else 'http://'
if self.is_https:
uri = 'https://'
return f"{uri}{self.domain_name}" return f"{uri}{self.domain_name}"
def get_download_url(self, name, expires=600, force_new=False): def get_download_url(self, name, expires=600, force_new=False):

@ -40,9 +40,7 @@ class QiNiuOss(object):
def get_download_url(self, name, expires=1800, force_new=False): def get_download_url(self, name, expires=1800, force_new=False):
# there are two ways to construct the base_url # there are two ways to construct the base_url
uri = 'http://' uri = 'https://' if self.is_https else 'http://'
if self.is_https:
uri = 'https://'
base_url = f'{uri}{self.domain_name}/{name}' base_url = f'{uri}{self.domain_name}/{name}'
# or download by passing in the url directly # or download by passing in the url directly
# the token expiration time can be set # the token expiration time can be set

@ -4,16 +4,13 @@
# author: liuyu # author: liuyu
# date: 2020/3/23 # date: 2020/3/23
import base64
import json
import logging import logging
import time import time
from django.core.cache import cache from common.base.baseutils import get_dict_from_filter_fields
from common.base.magic import run_function_by_locker
from api.models import UserInfo from common.cache.storage import CloudStorageCache, LocalStorageCache, DownloadUrlCache
from api.utils.baseutils import get_dict_from_filter_fields from fir_ser.settings import THIRD_PART_CONFIG_KEY_INFO
from fir_ser.settings import THIRD_PART_CONFIG_KEY_INFO, CACHE_KEY_TEMPLATE
from .aliyunApi import AliYunOss, AliYunCdn from .aliyunApi import AliYunOss, AliYunCdn
from .localApi import LocalStorage from .localApi import LocalStorage
from .qiniuApi import QiNiuOss from .qiniuApi import QiNiuOss
@ -27,6 +24,7 @@ def get_storage_auth(storage_obj):
return get_dict_from_filter_fields(filter_fields, storage_obj.__dict__) return get_dict_from_filter_fields(filter_fields, storage_obj.__dict__)
@run_function_by_locker()
def get_storage(user, assigned_storage_obj, use_default_storage): def get_storage(user, assigned_storage_obj, use_default_storage):
if use_default_storage: if use_default_storage:
return get_storage_form_conf(user) return get_storage_form_conf(user)
@ -37,10 +35,10 @@ def get_storage(user, assigned_storage_obj, use_default_storage):
if storage_obj: if storage_obj:
auth = get_storage_auth(storage_obj) auth = get_storage_auth(storage_obj)
storage_key = "_".join([CACHE_KEY_TEMPLATE.get('user_storage_key'), user.uid,
base64.b64encode(json.dumps(auth).encode("utf-8")).decode("utf-8")[0:64]])
storage_type = storage_obj.storage_type storage_type = storage_obj.storage_type
new_storage_obj = cache.get(storage_key) storage_cache = CloudStorageCache(auth, user.uid)
storage_key, new_storage_obj = storage_cache.get_storage_key_and_cache()
if new_storage_obj and not assigned_storage_obj: if new_storage_obj and not assigned_storage_obj:
logger.info(f"user {user} get storage obj {storage_key} cache {new_storage_obj}") logger.info(f"user {user} get storage obj {storage_key} cache {new_storage_obj}")
return new_storage_obj return new_storage_obj
@ -53,29 +51,21 @@ def get_storage(user, assigned_storage_obj, use_default_storage):
new_storage_obj = LocalStorage(**auth) new_storage_obj = LocalStorage(**auth)
logger.warning(f"user {user} make storage obj key:{storage_key} obj: {new_storage_obj}") logger.warning(f"user {user} make storage obj key:{storage_key} obj: {new_storage_obj}")
new_storage_obj.storage_type = storage_type new_storage_obj.storage_type = storage_type
cache.set(storage_key, new_storage_obj, 600) storage_cache.set_storage_cache(new_storage_obj, 600)
return new_storage_obj return new_storage_obj
else: else:
logger.info(f"user {user} has not storage obj, so get default") logger.info(f"user {user} has not storage obj, so get default")
# return self.get_default_storage(user, storage_obj, False)
# no need to manage storage; fetch the default storage directly from the config file # no need to manage storage; fetch the default storage directly from the config file
return get_storage_form_conf(user) return get_storage_form_conf(user)
def get_default_storage(user, storage_obj, use_default_storage):
admin_obj = UserInfo.objects.filter(is_superuser=True).order_by('pk').first()
if admin_obj and admin_obj.storage and admin_obj.pk != user.pk:
logger.info(f"user {user} has not storage obj, from admin get default storage")
return get_storage(admin_obj, storage_obj, use_default_storage)
else:
return get_storage_form_conf(user)
class Storage(object): class Storage(object):
def __init__(self, user, assigned_storage_obj=None, use_default_storage=False): def __init__(self, user, assigned_storage_obj=None, use_default_storage=False):
try: try:
with cache.lock("%s_%s" % ('make_storage_cache', user.uid), timeout=10, blocking_timeout=6): locker = {
self.storage = get_storage(user, assigned_storage_obj, use_default_storage) 'locker_key': f"make_storage_cache_{user.uid}",
"timeout": 60 * 5, "blocking_timeout": 6}
self.storage = get_storage(user, assigned_storage_obj, use_default_storage, locker=locker)
except Exception as e: except Exception as e:
logger.error(f"get {user} storage failed Exception:{e}") logger.error(f"get {user} storage failed Exception:{e}")
self.storage = None self.storage = None
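For orientation (not part of the diff): the locker dict built here is consumed by the run_function_by_locker decorator added in common/base/magic.py further down, which pops the locker kwarg and wraps the call in cache.lock. A hedged sketch of the same pattern on a toy function, with illustrative key and timeout values:

from common.base.magic import run_function_by_locker

@run_function_by_locker(timeout=60)
def rebuild_user_storage(uid):
    return f"storage object for {uid}"  # placeholder body

# the caller picks the lock key and timeouts per call, just like Storage.__init__ above
rebuild_user_storage("u123", locker={"locker_key": "make_storage_cache_u123",
                                     "timeout": 60 * 5, "blocking_timeout": 6})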
@ -87,8 +77,8 @@ class Storage(object):
def get_download_url(self, filename, expires=900, key='', force_new=False): def get_download_url(self, filename, expires=900, key='', force_new=False):
if self.storage: if self.storage:
now = time.time() now = time.time()
down_key = "_".join([key.lower(), CACHE_KEY_TEMPLATE.get('download_url_key'), filename]) download_cache = DownloadUrlCache(key, filename)
download_val = cache.get(down_key) download_val = download_cache.get_storage_cache()
if download_val and not force_new: if download_val and not force_new:
if download_val.get("time") > now - 60: if download_val.get("time") > now - 60:
return download_val.get("download_url") return download_val.get("download_url")
@ -97,7 +87,7 @@ class Storage(object):
download_url = cdn_obj.get_cdn_download_token(filename, expires) download_url = cdn_obj.get_cdn_download_token(filename, expires)
else: else:
download_url = self.storage.get_download_url(filename, expires, force_new=True) download_url = self.storage.get_download_url(filename, expires, force_new=True)
cache.set(down_key, {"download_url": download_url, "time": now + expires}, expires) download_cache.set_storage_cache({"download_url": download_url, "time": now + expires}, expires)
return download_url return download_url
def delete_file(self, filename, app_type=None): def delete_file(self, filename, app_type=None):
@ -155,19 +145,18 @@ def get_local_storage(clean_cache=False):
storage_type = storage.get('type', None) storage_type = storage.get('type', None)
if storage_type == 0: if storage_type == 0:
auth = storage.get('auth', {}) auth = storage.get('auth', {})
storage_key = "_".join(['local_storage_', CACHE_KEY_TEMPLATE.get('user_storage_key'), "_system_", storage_cache = LocalStorageCache(auth, "_system_")
base64.b64encode(json.dumps(auth).encode("utf-8")).decode("utf-8")[0:64]]) storage_key, new_storage_obj = storage_cache.get_storage_key_and_cache()
if clean_cache: if clean_cache:
logger.info(f"system clean local storage obj cache storage_key {storage_key}") logger.info(f"system clean local storage obj cache storage_key {storage_key}")
cache.delete(storage_key) storage_cache.del_storage_cache()
new_storage_obj = cache.get(storage_key)
if new_storage_obj: if new_storage_obj:
logger.info(f"system get local storage obj cache {new_storage_obj}") logger.info(f"system get local storage obj cache {new_storage_obj}")
return new_storage_obj return new_storage_obj
else: else:
new_storage_obj = LocalStorage(**auth) new_storage_obj = LocalStorage(**auth)
new_storage_obj.storage_type = 3 new_storage_obj.storage_type = 3
cache.set(storage_key, new_storage_obj, 600) storage_cache.set_storage_cache(new_storage_obj, 600)
logger.info(f"system get local storage obj, from settings storage {new_storage_obj}") logger.info(f"system get local storage obj, from settings storage {new_storage_obj}")
return new_storage_obj return new_storage_obj
@ -178,9 +167,8 @@ def get_storage_form_conf(user):
if storage.get("active", None): if storage.get("active", None):
storage_type = storage.get('type', None) storage_type = storage.get('type', None)
auth = storage.get('auth', {}) auth = storage.get('auth', {})
storage_key = "_".join([CACHE_KEY_TEMPLATE.get('user_storage_key'), 'default', storage_cache = CloudStorageCache(auth, 'default')
base64.b64encode(json.dumps(auth).encode("utf-8")).decode("utf-8")[0:64]]) storage_key, new_storage_obj = storage_cache.get_storage_key_and_cache()
new_storage_obj = cache.get(storage_key)
if new_storage_obj: if new_storage_obj:
logger.info(f"user {user} get default storage {storage_key} obj cache {new_storage_obj} ") logger.info(f"user {user} get default storage {storage_key} obj cache {new_storage_obj} ")
return new_storage_obj return new_storage_obj
@ -194,7 +182,7 @@ def get_storage_form_conf(user):
else: else:
new_storage_obj = LocalStorage(**auth) new_storage_obj = LocalStorage(**auth)
new_storage_obj.storage_type = 3 new_storage_obj.storage_type = 3
cache.set(storage_key, new_storage_obj, 600) storage_cache.set_storage_cache(new_storage_obj, 600)
logger.warning( logger.warning(
f"user {user} has not storage obj, from settings get default storage key:{storage_key} obj:{new_storage_obj}") f"user {user} has not storage obj, from settings get default storage key:{storage_key} obj:{new_storage_obj}")
return new_storage_obj return new_storage_obj

@ -1,29 +0,0 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# project: 9月
# author: NinEveN
# date: 2020/9/24
import base64
from django.core.cache import cache
class TmpCache(object):
@staticmethod
def set_tmp_cache(key, token, target, limit=60 * 5):
nkey = '%s:%s' % (key, token)
nkey = base64.b64encode(nkey.encode("utf-8")).decode("utf-8")
cache.set(nkey, target, limit)
@staticmethod
def get_tmp_cache(key, token):
nkey = '%s:%s' % (key, token)
nkey = base64.b64encode(nkey.encode("utf-8")).decode("utf-8")
return cache.get(nkey)
@staticmethod
def del_tmp_cache(key, token):
nkey = base64.b64encode('%s:%s'.encode("utf-8") % (key, token))
return cache.delete(nkey)

@ -16,14 +16,14 @@ from api.models import APPSuperSignUsedInfo, APPToDeveloper, \
UDIDsyncDeveloper, UserInfo, AppReleaseInfo, AppScreenShot, Token, DeveloperDevicesID, UserAdDisplayInfo UDIDsyncDeveloper, UserInfo, AppReleaseInfo, AppScreenShot, Token, DeveloperDevicesID, UserAdDisplayInfo
from api.utils.TokenManager import generate_numeric_token_of_length, generate_alphanumeric_token_of_length, make_token, \ from api.utils.TokenManager import generate_numeric_token_of_length, generate_alphanumeric_token_of_length, make_token, \
verify_token verify_token
from api.utils.baseutils import get_real_ip_address
from api.utils.modelutils import get_app_d_count_by_app_id from api.utils.modelutils import get_app_d_count_by_app_id
from api.utils.sendmsg.sendmsg import SendMessage from api.utils.sendmsg.sendmsg import SendMessage
from api.utils.storage.caches import consume_user_download_times from api.utils.storage.caches import consume_user_download_times
from api.utils.storage.localApi import LocalStorage from api.utils.storage.localApi import LocalStorage
from api.utils.storage.storage import Storage from api.utils.storage.storage import Storage
from api.utils.tempcaches import TmpCache from common.base.baseutils import get_real_ip_address
from fir_ser.settings import SERVER_DOMAIN, CAPTCHA_LENGTH, MEDIA_ROOT, CACHE_KEY_TEMPLATE from common.cache.storage import UserTokenCache, TempCache
from fir_ser.settings import SERVER_DOMAIN, CAPTCHA_LENGTH, MEDIA_ROOT
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -116,7 +116,7 @@ def get_sender_token(sender, user_id, target, action, msg=None):
if msg: if msg:
code = msg code = msg
token = make_token(code, time_limit=300, key=user_id) token = make_token(code, time_limit=300, key=user_id)
TmpCache.set_tmp_cache(user_id, token, target) TempCache(user_id, token).set_storage_cache(target, 60 * 5)
if action in ('change', 'password', 'register', 'login', 'common'): if action in ('change', 'password', 'register', 'login', 'common'):
sender.send_msg_by_act(target, code, action) sender.send_msg_by_act(target, code, action)
elif action == 'msg': elif action == 'msg':
@ -135,7 +135,7 @@ def get_sender_sms_token(key, phone, action, msg=None):
def is_valid_sender_code(key, token, code, success_once=False): def is_valid_sender_code(key, token, code, success_once=False):
return verify_token(token, code, success_once), TmpCache.get_tmp_cache(key, token) return verify_token(token, code, success_once), TempCache(key, token).get_storage_cache()
def get_sender_email_token(key, email, action, msg=None): def get_sender_email_token(key, email, action, msg=None):
@ -299,28 +299,11 @@ def clean_storage_data(user_obj, storage_obj=None):
return True return True
def get_choices_dict(choices):
result = []
choices_org_list = list(choices)
for choice in choices_org_list:
result.append({'id': choice[0], 'name': choice[1]})
return result
def get_choices_name_from_key(choices, key):
choices_org_list = list(choices)
for choice in choices_org_list:
if choice[0] == key:
return choice[1]
return ''
def set_user_token(user_obj, request): def set_user_token(user_obj, request):
key = binascii.hexlify(os.urandom(32)).decode() key = binascii.hexlify(os.urandom(32)).decode()
now = datetime.datetime.now() now = datetime.datetime.now()
user_info = UserInfo.objects.get(pk=user_obj.pk) user_info = UserInfo.objects.get(pk=user_obj.pk)
auth_key = "_".join([CACHE_KEY_TEMPLATE.get('user_auth_token_key'), key]) UserTokenCache(key).set_storage_cache({'uid': user_info.uid, 'username': user_info.username}, 3600 * 24 * 7)
cache.set(auth_key, {'uid': user_info.uid, 'username': user_info.username}, 3600 * 24 * 7)
Token.objects.create(user=user_obj, Token.objects.create(user=user_obj,
**{"access_token": key, "created": now, "remote_addr": get_real_ip_address(request)}) **{"access_token": key, "created": now, "remote_addr": get_real_ip_address(request)})
return key, user_info return key, user_info
@ -332,6 +315,5 @@ def clean_user_token_and_cache(user_obj, white_token_list=None):
for token_obj in Token.objects.filter(user=user_obj): for token_obj in Token.objects.filter(user=user_obj):
if token_obj.access_token in white_token_list: if token_obj.access_token in white_token_list:
continue continue
auth_key = "_".join([CACHE_KEY_TEMPLATE.get('user_auth_token_key'), token_obj.access_token]) UserTokenCache(token_obj.access_token).del_storage_cache()
cache.delete(auth_key)
token_obj.delete() token_obj.delete()

@ -19,10 +19,10 @@ from api.utils.auth import ExpiringTokenAuthentication
from api.utils.modelutils import get_user_domain_name, get_app_domain_name, check_super_sign_permission from api.utils.modelutils import get_user_domain_name, get_app_domain_name, check_super_sign_permission
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.serializer import AppsSerializer, AppReleaseSerializer, AppsListSerializer, AppsQrListSerializer from api.utils.serializer import AppsSerializer, AppReleaseSerializer, AppsListSerializer, AppsQrListSerializer
from api.utils.storage.caches import del_cache_response_by_short, get_app_today_download_times, del_cache_by_delete_app, \ from api.utils.storage.caches import del_cache_response_by_short, get_app_today_download_times, del_cache_by_delete_app
CleanAppSignDataState, MigrateStorageState
from api.utils.storage.storage import Storage from api.utils.storage.storage import Storage
from api.utils.utils import delete_local_files, delete_app_screenshots_files from api.utils.utils import delete_local_files, delete_app_screenshots_files
from common.cache.state import MigrateStorageState, CleanAppSignDataState
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -11,12 +11,11 @@ from rest_framework.views import APIView
from api.models import UserDomainInfo, Apps from api.models import UserDomainInfo, Apps
from api.utils.auth import ExpiringTokenAuthentication from api.utils.auth import ExpiringTokenAuthentication
from api.utils.baseutils import is_valid_domain, get_cname_from_domain
from api.utils.modelutils import get_user_domain_name, get_min_default_domain_cname_obj from api.utils.modelutils import get_user_domain_name, get_min_default_domain_cname_obj
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.serializer import DomainNameSerializer from api.utils.serializer import DomainNameSerializer
from api.utils.storage.caches import del_cache_response_by_short, reset_app_wx_easy_type from api.utils.storage.caches import del_cache_response_by_short, reset_app_wx_easy_type
from api.utils.utils import get_choices_dict from common.base.baseutils import is_valid_domain, get_cname_from_domain, get_choices_dict
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -15,8 +15,6 @@ from api.models import Apps, AppReleaseInfo, APPToDeveloper, APPSuperSignUsedInf
from api.utils.TokenManager import verify_token from api.utils.TokenManager import verify_token
from api.utils.app.apputils import make_resigned from api.utils.app.apputils import make_resigned
from api.utils.app.supersignutils import make_sign_udid_mobile_config from api.utils.app.supersignutils import make_sign_udid_mobile_config
from api.utils.baseutils import get_profile_full_path, make_random_uuid, get_origin_domain_name, \
format_get_uri, get_post_udid_url
from api.utils.decorators import cache_response # drf-extensions==0.7.0 was used originally, but it does not yet support this Django version from api.utils.decorators import cache_response # drf-extensions==0.7.0 was used originally, but it does not yet support this Django version
from api.utils.modelutils import get_filename_form_file, check_app_domain_name_access, \ from api.utils.modelutils import get_filename_form_file, check_app_domain_name_access, \
ad_random_weight, get_app_download_uri ad_random_weight, get_app_download_uri
@ -25,6 +23,8 @@ from api.utils.serializer import AppsShortSerializer, AppAdInfoSerializer
from api.utils.storage.caches import del_cache_response_by_short, check_app_permission, get_app_download_url from api.utils.storage.caches import del_cache_response_by_short, check_app_permission, get_app_download_url
from api.utils.storage.storage import Storage, get_local_storage from api.utils.storage.storage import Storage, get_local_storage
from api.utils.throttle import VisitShortThrottle, InstallShortThrottle, InstallThrottle1, InstallThrottle2 from api.utils.throttle import VisitShortThrottle, InstallShortThrottle, InstallThrottle1, InstallThrottle2
from common.base.baseutils import get_profile_full_path, make_random_uuid, get_origin_domain_name, \
format_get_uri, get_post_udid_url
from fir_ser import settings from fir_ser import settings
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -6,7 +6,6 @@ from rest_framework.views import APIView
from api.models import UserInfo, UserCertificationInfo, CertificationInfo, Apps from api.models import UserInfo, UserCertificationInfo, CertificationInfo, Apps
from api.utils.auth import ExpiringTokenAuthentication from api.utils.auth import ExpiringTokenAuthentication
from api.utils.baseutils import is_valid_phone, is_valid_email, get_real_ip_address
from api.utils.geetest.geetest_utils import first_register, second_validate from api.utils.geetest.geetest_utils import first_register, second_validate
from api.utils.modelutils import get_min_default_domain_cname_obj, add_remote_info_from_request from api.utils.modelutils import get_min_default_domain_cname_obj, add_remote_info_from_request
from api.utils.mp.wechat import make_wx_login_qrcode, show_qrcode_url from api.utils.mp.wechat import make_wx_login_qrcode, show_qrcode_url
@ -18,6 +17,7 @@ from api.utils.throttle import VisitRegister1Throttle, VisitRegister2Throttle, G
from api.utils.utils import get_captcha, valid_captcha, \ from api.utils.utils import get_captcha, valid_captcha, \
get_sender_sms_token, is_valid_sender_code, get_sender_email_token, get_random_username, \ get_sender_sms_token, is_valid_sender_code, get_sender_email_token, get_random_username, \
check_username_exists, set_user_token, clean_user_token_and_cache check_username_exists, set_user_token, clean_user_token_and_cache
from common.base.baseutils import is_valid_phone, is_valid_email, get_real_ip_address
from fir_ser.settings import LOGIN, CHANGER, REPORT, NEW_USER_GIVE_DOWNLOAD_TIMES, REGISTER from fir_ser.settings import LOGIN, CHANGER, REPORT, NEW_USER_GIVE_DOWNLOAD_TIMES, REGISTER
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -1,12 +1,12 @@
import logging import logging
from django.contrib import auth from django.contrib import auth
from django.core.cache import cache
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
from api.models import Token from api.models import Token
from api.utils.auth import ExpiringTokenAuthentication from api.utils.auth import ExpiringTokenAuthentication
from common.cache.storage import RedisCacheBase
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -18,7 +18,7 @@ class LogoutView(APIView):
logger.info(f"user:{request.user} logout") logger.info(f"user:{request.user} logout")
user = request.user.pk user = request.user.pk
auth_token = request.auth auth_token = request.auth
cache.delete(auth_token) RedisCacheBase(auth_token).del_storage_cache()
Token.objects.filter(user=user, access_token=auth_token).delete() Token.objects.filter(user=user, access_token=auth_token).delete()
auth.logout(request) auth.logout(request)
return Response({"code": 1000}) return Response({"code": 1000})

@ -12,12 +12,11 @@ from rest_framework.views import APIView
from api.models import Price, Order from api.models import Price, Order
from api.utils.auth import ExpiringTokenAuthentication from api.utils.auth import ExpiringTokenAuthentication
from api.utils.baseutils import get_order_num
from api.utils.pay.util import get_pay_obj_form_name, get_enable_pay_choices, get_payment_type from api.utils.pay.util import get_pay_obj_form_name, get_enable_pay_choices, get_payment_type
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.serializer import PriceSerializer, OrdersSerializer from api.utils.serializer import PriceSerializer, OrdersSerializer
from api.utils.storage.caches import update_order_status from api.utils.storage.caches import update_order_status
from api.utils.utils import get_choices_dict from common.base.baseutils import get_order_num, get_choices_dict
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -14,12 +14,12 @@ from rest_framework.views import APIView
from api.models import Apps from api.models import Apps
from api.tasks import run_sign_task from api.tasks import run_sign_task
from api.utils.app.supersignutils import udid_bytes_to_dict, make_sign_udid_mobile_config from api.utils.app.supersignutils import udid_bytes_to_dict, make_sign_udid_mobile_config
from api.utils.baseutils import get_real_ip_address, get_http_server_domain, make_random_uuid
from api.utils.modelutils import get_redirect_server_domain, add_remote_info_from_request, \ from api.utils.modelutils import get_redirect_server_domain, add_remote_info_from_request, \
get_app_download_uri get_app_download_uri
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.storage.caches import check_app_permission from api.utils.storage.caches import check_app_permission
from api.utils.throttle import ReceiveUdidThrottle1, ReceiveUdidThrottle2 from api.utils.throttle import ReceiveUdidThrottle1, ReceiveUdidThrottle2
from common.base.baseutils import get_real_ip_address, get_http_server_domain, make_random_uuid
from fir_ser.celery import app from fir_ser.celery import app
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -9,13 +9,13 @@ from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
from api.models import Apps, AppReportInfo from api.models import Apps, AppReportInfo
from api.utils.baseutils import get_real_ip_address
from api.utils.modelutils import add_remote_info_from_request from api.utils.modelutils import add_remote_info_from_request
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.serializer import AppReportSerializer from api.utils.serializer import AppReportSerializer
from api.utils.storage.caches import login_auth_failed from api.utils.storage.caches import login_auth_failed
from api.utils.throttle import InstallThrottle2 from api.utils.throttle import InstallThrottle2
from api.utils.utils import is_valid_sender_code, get_captcha, get_choices_dict from api.utils.utils import is_valid_sender_code, get_captcha
from common.base.baseutils import get_real_ip_address, get_choices_dict
from fir_ser.settings import REPORT from fir_ser.settings import REPORT
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -15,8 +15,9 @@ from api.utils.app.apputils import clean_history_apps
from api.utils.auth import ExpiringTokenAuthentication, StoragePermission from api.utils.auth import ExpiringTokenAuthentication, StoragePermission
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.serializer import StorageSerializer from api.utils.serializer import StorageSerializer
from api.utils.storage.caches import MigrateStorageState from api.utils.utils import upload_oss_default_head_img
from api.utils.utils import upload_oss_default_head_img, get_choices_dict from common.base.baseutils import get_choices_dict
from common.cache.state import MigrateStorageState
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -20,8 +20,10 @@ from api.utils.modelutils import get_user_public_used_sign_num, get_user_public_
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.serializer import DeveloperSerializer, SuperSignUsedSerializer, DeviceUDIDSerializer, BillInfoSerializer, \ from api.utils.serializer import DeveloperSerializer, SuperSignUsedSerializer, DeviceUDIDSerializer, BillInfoSerializer, \
DeveloperDeviceSerializer, AppleDeveloperToAppUseSerializer, AppleDeveloperToAppUseAppsSerializer DeveloperDeviceSerializer, AppleDeveloperToAppUseSerializer, AppleDeveloperToAppUseAppsSerializer
from api.utils.storage.caches import CleanSignDataState, get_app_download_url from api.utils.storage.caches import get_app_download_url
from api.utils.utils import get_developer_devices, get_choices_dict from api.utils.utils import get_developer_devices
from common.base.baseutils import get_choices_dict
from common.cache.state import CleanSignDataState
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -15,11 +15,12 @@ from api.tasks import run_resign_task
from api.utils.TokenManager import verify_token from api.utils.TokenManager import verify_token
from api.utils.app.apputils import get_random_short, save_app_infos from api.utils.app.apputils import get_random_short, save_app_infos
from api.utils.auth import ExpiringTokenAuthentication from api.utils.auth import ExpiringTokenAuthentication
from api.utils.baseutils import make_app_uuid, make_from_user_uuid
from api.utils.modelutils import check_super_sign_permission, get_app_download_uri from api.utils.modelutils import check_super_sign_permission, get_app_download_uri
from api.utils.response import BaseResponse from api.utils.response import BaseResponse
from api.utils.storage.caches import upload_file_tmp_name, del_cache_response_by_short, MigrateStorageState from api.utils.storage.caches import upload_file_tmp_name, del_cache_response_by_short
from api.utils.storage.storage import Storage from api.utils.storage.storage import Storage
from common.base.baseutils import make_app_uuid, make_from_user_uuid
from common.cache.state import MigrateStorageState
from fir_ser import settings from fir_ser import settings
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -285,7 +285,7 @@ def get_real_ip_address(request):
def get_origin_domain_name(request): def get_origin_domain_name(request):
meta = request.META meta = request.META
return request.META.get('HTTP_ORIGIN', meta.get('HTTP_REFERER', 'http://xxx/xxx')).split('//')[-1].split('/')[0] return request.META.get('HTTP_ORIGIN', meta.get('HTTP_REFERER', 'https://xxx/xxx')).split('//')[-1].split('/')[0]
def format_get_uri(domain, short, data): def format_get_uri(domain, short, data):
@ -304,3 +304,19 @@ def get_order_num(order_type=1):
now.second) now.second)
return date_str + str(random.randint(1000, 9999)) + str(random.randint(1000, 9999)) + str( return date_str + str(random.randint(1000, 9999)) + str(random.randint(1000, 9999)) + str(
random.randint(1000, 9999)) random.randint(1000, 9999))
def get_choices_dict(choices):
result = []
choices_org_list = list(choices)
for choice in choices_org_list:
result.append({'id': choice[0], 'name': choice[1]})
return result
def get_choices_name_from_key(choices, key):
choices_org_list = list(choices)
for choice in choices_org_list:
if choice[0] == key:
return choice[1]
return ''

@ -0,0 +1,65 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# project: 12月
# author: NinEveN
# date: 2021/12/22
import logging
import time
from functools import wraps
from django.core.cache import cache
logger = logging.getLogger(__name__)
def run_function_by_locker(timeout=60 * 5):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
start_time = time.time()
locker = kwargs.get('locker', {})
if locker:
kwargs.pop('locker')
t_locker = {'timeout': timeout, 'locker_key': func.__name__}
t_locker.update(locker)
new_locker_key = t_locker.pop('locker_key')
new_timeout = t_locker.pop('timeout')
if locker and new_timeout and new_locker_key:
with cache.lock(new_locker_key, timeout=new_timeout, **t_locker):
logger.info(f"{new_locker_key} exec {func} start. now time:{time.time()}")
res = func(*args, **kwargs)
else:
res = func(*args, **kwargs)
logger.info(f"{new_locker_key} exec {func} finished. used time:{time.time() - start_time}")
return res
return wrapper
return decorator
def call_function_try_attempts(try_attempts=3, sleep_time=2, failed_callback=None):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
res = False, {}
start_time = time.time()
for i in range(try_attempts):
res = func(*args, **kwargs)
status, result = res
if status:
return res
else:
logger.warning(f'exec {func} failed. {try_attempts} times in total. now {sleep_time} later try '
f'again...{i}')
time.sleep(sleep_time)
if not res[0]:
logger.error(f'exec {func} failed after the maximum number of attempts. Failed:{res[1]}')
if failed_callback:
failed_callback()
logger.info(f"exec {func} finished. time:{time.time() - start_time}")
return res
return wrapper
return decorator
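A minimal sketch (not part of the diff) of how the retry decorator is meant to be applied; the function name, return payload and callback are illustrative only, since the decorator simply requires the wrapped callable to return a (status, result) tuple:

from common.base.magic import call_function_try_attempts

def alert_admin():
    print("giving up after repeated failures")

@call_function_try_attempts(try_attempts=3, sleep_time=2, failed_callback=alert_admin)
def register_device(udid):
    # pretend call to an external API; must return (status, result)
    return False, {"udid": udid, "error": "upstream timeout"}

status, result = register_device("00008030-000A11112222002E")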

@ -0,0 +1,118 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# project: 12月
# author: NinEveN
# date: 2021/12/23
from api.models import AppScreenShot, AppReleaseInfo, Apps
from common.cache.storage import AdPicShowCache, AppDownloadShortShowCache, RedisCacheBase, DownloadUrlCache, \
TokenManagerCache, CloudStorageCache, AppInstanceCache, AppDownloadTimesCache
def invalid_screen_pic_cache(key, app_obj):
for screen_shot_obj in AppScreenShot.objects.filter(app_id=app_obj).all():
DownloadUrlCache(key, screen_shot_obj.screenshot_url).del_storage_cache()
def invalid_ad_pic_cache(key, short):
ad_pic_short_cache = AdPicShowCache(key, short)
ad_pic_cache_key = ad_pic_short_cache.get_storage_cache()
if ad_pic_cache_key:
DownloadUrlCache(key, ad_pic_cache_key).del_storage_cache()
ad_pic_short_cache.del_storage_cache()
def invalid_short_response_cache(key, short):
short_storage = AppDownloadShortShowCache(key, short)
response_cache_key_list = short_storage.get_storage_cache()
if response_cache_key_list and isinstance(response_cache_key_list, list):
for response_cache_key in response_cache_key_list:
RedisCacheBase(response_cache_key).del_storage_cache()
short_storage.del_storage_cache()
def invalid_short_cache(app_obj, key='ShortDownloadView'.lower()):
"""
Invalidate the cached data generated for the download page
:param key:
:param app_obj:
:return:
cache key: 'ShortDownloadView'.lower()
1. clear the image cache
2. clear the download token cache
3. clear the ad cache
4. clear the app screenshot cache
5. clear the download token cache [no cleanup needed]
6. clear the cached response
7. clear the cached download instance
# 8. if there are linked apps, their image and download token caches also need to be cleared
"""
if not app_obj:
return
master_release_dict = AppReleaseInfo.objects.filter(app_id=app_obj, is_master=True).values('icon_url',
'release_id').first()
# 1. clear the image cache
DownloadUrlCache(key, master_release_dict.get('icon_url')).del_storage_cache()
release_id = master_release_dict.get('release_id')
# 2. clear the download token cache
TokenManagerCache(key, release_id).del_storage_cache()
TokenManagerCache('', f"{release_id}.plist").del_storage_cache()
# 3. clear the ad cache
invalid_ad_pic_cache(key, app_obj.short)
# 4. clear the app screenshot cache
invalid_screen_pic_cache(key, app_obj)
# 6. clear the cached response
invalid_short_response_cache(key, app_obj.short)
# 7. clear the cached download instance
AppInstanceCache(app_obj.app_id).del_storage_cache()
def invalid_app_cache(app_obj):
"""
cleanup operations that need to run when an app is deleted
:param app_obj:
:return:
"""
invalid_short_cache(app_obj, '')
invalid_short_cache(app_obj, 'ShortDownloadView'.lower())
def invalid_head_img_cache(user_obj):
"""
:param user_obj:
:return:
"""
DownloadUrlCache('', user_obj.head_img).del_storage_cache()
def invalid_user_storage_cache(user_obj, storage_auth):
"""
:param user_obj:
:param storage_auth:
:return:
"""
CloudStorageCache(storage_auth, user_obj.uid).del_storage_cache()
for app_obj in Apps.objects.filter(user_id=user_obj).all():
invalid_app_cache(app_obj)
invalid_head_img_cache(user_obj)
def invalid_app_download_times_cache(app_id):
"""
the download-times cache needs to be cleared when an app is deleted
:param app_id:
:return:
"""
AppDownloadTimesCache(app_id).del_storage_cache()
AppInstanceCache(app_id).del_storage_cache()
def invalid_app_download_plist_cache(release_id):
TokenManagerCache('', release_id).del_storage_cache()
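A hedged sketch (not part of the diff) of how these invalidation helpers are presumably called when an application is removed; the lookup value is hypothetical:

from api.models import Apps
from common.cache.invalid import invalid_app_cache, invalid_app_download_times_cache

app_obj = Apps.objects.filter(app_id="xxx").first()  # hypothetical app_id value
if app_obj:
    invalid_app_cache(app_obj)  # drops both the '' and 'shortdownloadview' cache namespaces
    invalid_app_download_times_cache(app_obj.app_id)  # plus the per-app download counters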

@ -0,0 +1,52 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# project: 12月
# author: NinEveN
# date: 2021/12/22
import logging
import time
from django.core.cache import cache
logger = logging.getLogger(__name__)
class CacheBaseState(object):
def __init__(self, key, value=time.time(), timeout=3600 * 24):
self.key = f"CacheBaseState_{self.__class__.__name__}_{key}"
self.value = value
self.timeout = timeout
self.active = False
def get_state(self):
return cache.get(self.key)
def __enter__(self):
if cache.get(self.key):
return False
else:
cache.set(self.key, self.value, self.timeout)
self.active = True
return True
def __exit__(self, exc_type, exc_val, exc_tb):
if self.active:
cache.delete(self.key)
logger.info(f"cache base state __exit__ {exc_type}, {exc_val}, {exc_tb}")
class MigrateStorageState(CacheBaseState):
...
class CleanSignDataState(CacheBaseState):
...
class CleanAppSignDataState(CacheBaseState):
...
class CleanErrorBundleIdSignDataState(CacheBaseState):
...
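A minimal sketch (not part of the diff) of the intended context-manager usage: __enter__ returns False when another worker already holds the state key, so callers are expected to check the flag; the uid is illustrative:

from common.cache.state import MigrateStorageState

with MigrateStorageState("u123") as can_run:
    if not can_run:
        print("a storage migration for this user is already in progress")
    else:
        ...  # safe to migrate; the key is removed again in __exit__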

@ -0,0 +1,205 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# project: 12月
# author: NinEveN
# date: 2021/12/22
import base64
import json
import logging
from django.core.cache import cache
from django.utils import timezone
from fir_ser.settings import CACHE_KEY_TEMPLATE
logger = logging.getLogger(__name__)
class RedisCacheBase(object):
def __init__(self, cache_key):
self.cache_key = cache_key
def __getattribute__(self, item):
if isinstance(item, str) and item != 'cache_key':
if hasattr(self, "cache_key"):
logger.debug(f'act:{item} cache_key:{super().__getattribute__("cache_key")}')
return super().__getattribute__(item)
def get_storage_cache(self):
return cache.get(self.cache_key)
def get_storage_key_and_cache(self):
return self.cache_key, cache.get(self.cache_key)
def set_storage_cache(self, value, timeout=600):
return cache.set(self.cache_key, value, timeout)
def del_storage_cache(self):
return cache.delete(self.cache_key)
def incr(self, amount=1):
return cache.incr(self.cache_key, amount)
def expire(self, timeout):
return cache.expire(self.cache_key, timeout=timeout)
def iter_keys(self):
if not self.cache_key.endswith('*'):
self.cache_key = f"{self.cache_key}*"
return cache.iter_keys(self.cache_key)
def get_many(self):
return cache.get_many(self.cache_key)
def del_many(self):
for delete_key in cache.iter_keys(self.cache_key):
cache.delete(delete_key)
return True
class CloudStorageCache(RedisCacheBase):
def __init__(self, auth, uid):
if auth == '*':
bid = auth
else:
bid = base64.b64encode(json.dumps(auth).encode("utf-8")).decode("utf-8")[0:64]
self.cache_key = f"{CACHE_KEY_TEMPLATE.get('user_storage_key')}_{uid}_{bid}"
super().__init__(self.cache_key)
class LocalStorageCache(RedisCacheBase):
def __init__(self, auth, uid):
bid = base64.b64encode(json.dumps(auth).encode("utf-8")).decode("utf-8")[0:64]
self.cache_key = f"local_storage_{CACHE_KEY_TEMPLATE.get('user_storage_key')}_{uid}_{bid}"
super().__init__(self.cache_key)
class DownloadUrlCache(RedisCacheBase):
def __init__(self, key, filename):
self.cache_key = f"{key.lower()}_{CACHE_KEY_TEMPLATE.get('download_url_key')}_{filename}"
super().__init__(self.cache_key)
class UserTokenCache(RedisCacheBase):
def __init__(self, key):
self.cache_key = f"{CACHE_KEY_TEMPLATE.get('user_auth_token_key')}_{key}"
super().__init__(self.cache_key)
class IpProxyListCache(RedisCacheBase):
def __init__(self):
self.cache_key = CACHE_KEY_TEMPLATE.get("ip_proxy_store_list_key")
super().__init__(self.cache_key)
class IpProxyActiveCache(RedisCacheBase):
def __init__(self):
self.cache_key = CACHE_KEY_TEMPLATE.get("ip_proxy_store_active_key")
super().__init__(self.cache_key)
class TokenManagerCache(RedisCacheBase):
def __init__(self, key, release_id):
self.cache_key = f"{key.lower()}_{CACHE_KEY_TEMPLATE.get('make_token_key')}_{release_id}"
super().__init__(self.cache_key)
class AdPicShowCache(RedisCacheBase):
def __init__(self, key, short):
self.cache_key = f"{key.lower()}_{CACHE_KEY_TEMPLATE.get('ad_pic_show_key')}_{short}"
super().__init__(self.cache_key)
class TempCache(RedisCacheBase):
def __init__(self, key, token):
self.cache_key = base64.b64encode(f"{key}:{token}".encode("utf-8")).decode("utf-8")
super().__init__(self.cache_key)
class WxTokenCache(RedisCacheBase):
def __init__(self):
self.cache_key = CACHE_KEY_TEMPLATE.get("wx_access_token_key")
super().__init__(self.cache_key)
class WxTicketCache(RedisCacheBase):
def __init__(self, ticket):
self.cache_key = f"{CACHE_KEY_TEMPLATE.get('wx_ticket_info_key')}_{ticket}"
super().__init__(self.cache_key)
class AppInstanceCache(RedisCacheBase):
def __init__(self, app_id):
self.cache_key = f"{CACHE_KEY_TEMPLATE.get('app_instance_key')}_{app_id}"
super().__init__(self.cache_key)
class AppDownloadTimesCache(RedisCacheBase):
def __init__(self, app_id):
self.init_many_keys(app_id)
super().__init__(self.cache_key)
def init_many_keys(self, app_id):
bmp_key = CACHE_KEY_TEMPLATE.get('download_times_key')
if isinstance(app_id, list):
self.cache_key = []
for key in app_id:
self.cache_key.append(f"{bmp_key}_{key}")
else:
self.cache_key = f"{bmp_key}_{app_id}"
class AppDownloadTodayTimesCache(RedisCacheBase):
def __init__(self, app_id):
self.init_many_keys(app_id)
super().__init__(self.cache_key)
def init_many_keys(self, app_id):
now = timezone.now()
tmp_key = CACHE_KEY_TEMPLATE.get("download_today_times_key")
bmp_key = f"{tmp_key}_{now.year}_{now.month}_{now.day}"
if isinstance(app_id, list):
self.cache_key = []
for key in app_id:
self.cache_key.append(f"{bmp_key}_{key}")
else:
self.cache_key = f"{bmp_key}_{app_id}"
class AppDownloadShortCache(RedisCacheBase):
def __init__(self, key, short):
self.cache_key = f"{key.lower()}_{CACHE_KEY_TEMPLATE.get('download_short_key')}_{short}"
super().__init__(self.cache_key)
class AppDownloadShortShowCache(RedisCacheBase):
def __init__(self, key, short):
self.cache_key = f"{key.lower()}_{CACHE_KEY_TEMPLATE.get('download_short_show_key')}_{short}"
super().__init__(self.cache_key)
class UploadTmpFileNameCache(RedisCacheBase):
def __init__(self, filename):
self.cache_key = f"{CACHE_KEY_TEMPLATE.get('upload_file_tmp_name_key')}_{filename}"
super().__init__(self.cache_key)
class UserCanDownloadCache(RedisCacheBase):
def __init__(self, user_id):
self.cache_key = f"{CACHE_KEY_TEMPLATE.get('user_can_download_key')}_{user_id}"
super().__init__(self.cache_key)
class UserFreeDownloadTimesCache(RedisCacheBase):
def __init__(self, user_id):
now = timezone.now()
tmp_key = CACHE_KEY_TEMPLATE.get('user_free_download_times_key')
self.cache_key = f"{tmp_key}_{now.year}_{now.month}_{now.day}_{user_id}"
super().__init__(self.cache_key)
class SignUdidQueueCache(RedisCacheBase):
def __init__(self, prefix_key):
self.cache_key = f"{CACHE_KEY_TEMPLATE.get('ipa_sign_udid_queue_key')}_{prefix_key}"
super().__init__(self.cache_key)
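A short sketch (not part of the diff) of the shared wrapper API these classes inherit from RedisCacheBase; the key parts and values are illustrative:

from common.cache.storage import DownloadUrlCache, UserCanDownloadCache

# typed key construction plus the common get/set/del interface
url_cache = DownloadUrlCache("shortdownloadview", "icon_abc.png")  # hypothetical filename
url_cache.set_storage_cache({"download_url": "https://cdn.example.com/icon_abc.png"}, 600)
cached = url_cache.get_storage_cache()
url_cache.del_storage_cache()

# counters reuse the same base class and lean on incr()
quota = UserCanDownloadCache("u123")  # hypothetical user_id
quota.set_storage_cache(10, 3600)
quota.incr(-1)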

@ -223,7 +223,9 @@ CACHE_KEY_TEMPLATE = {
'user_can_download_key': 'user_can_download', 'user_can_download_key': 'user_can_download',
'download_times_key': 'app_download_times', 'download_times_key': 'app_download_times',
'make_token_key': 'make_token', 'make_token_key': 'make_token',
'ad_pic_show_key': 'ad_pic_show',
'download_short_key': 'download_short', 'download_short_key': 'download_short',
'download_short_show_key': 'download_short_show',
'app_instance_key': 'app_instance', 'app_instance_key': 'app_instance',
'download_url_key': 'download_url', 'download_url_key': 'download_url',
'user_storage_key': 'storage_auth', 'user_storage_key': 'storage_auth',
