Quellcode durchsuchen

aliyun tools rearranged.

duh@133.cn vor 5 Jahren
Ursprung
Commit
04b7f9a3ae
27 geänderte Dateien mit 1521 neuen und 0 gelöschten Zeilen
  1. 168 0
      pyproject/AliyunTools/ECSMonotor/ECSMonitorApi.py
  2. 3 0
      pyproject/AliyunTools/ECSMonotor/config.py
  3. 58 0
      pyproject/AliyunTools/README.md
  4. 1 0
      pyproject/AliyunTools/aliyun_eipaddress/.python-version
  5. 153 0
      pyproject/AliyunTools/aliyun_eipaddress/ECSCreater.py
  6. 131 0
      pyproject/AliyunTools/aliyun_eipaddress/EIPChanger.py
  7. 30 0
      pyproject/AliyunTools/aliyun_eipaddress/InstanceID.py
  8. BIN
      pyproject/AliyunTools/aliyun_eipaddress/__pycache__/EIPChanger.cpython-37.pyc
  9. BIN
      pyproject/AliyunTools/aliyun_eipaddress/__pycache__/InstanceID.cpython-37.pyc
  10. BIN
      pyproject/AliyunTools/aliyun_eipaddress/__pycache__/config.cpython-37.pyc
  11. BIN
      pyproject/AliyunTools/aliyun_eipaddress/__pycache__/index.cpython-37.pyc
  12. BIN
      pyproject/AliyunTools/aliyun_eipaddress/__pycache__/word_generater.cpython-37.pyc
  13. 78 0
      pyproject/AliyunTools/aliyun_eipaddress/call/interfacecaller.py
  14. 3 0
      pyproject/AliyunTools/aliyun_eipaddress/config.py
  15. 3 0
      pyproject/AliyunTools/aliyun_eipaddress/config_hbgj.py
  16. 17 0
      pyproject/AliyunTools/aliyun_eipaddress/howtorun.txt
  17. 75 0
      pyproject/AliyunTools/aliyun_eipaddress/index.py
  18. 2 0
      pyproject/AliyunTools/aliyun_eipaddress/logs/access.log
  19. 0 0
      pyproject/AliyunTools/aliyun_eipaddress/logs/debug.log
  20. 182 0
      pyproject/AliyunTools/aliyun_eipaddress/logs/error.log
  21. 1 0
      pyproject/AliyunTools/aliyun_eipaddress/run/gunicorn.pid
  22. 15 0
      pyproject/AliyunTools/aliyun_eipaddress/word_generater.py
  23. 9 0
      pyproject/Es2Mongo/README.md
  24. 111 0
      pyproject/Es2Mongo/esdata2mongo.py
  25. 84 0
      pyproject/Es2Mongo/transdata.py
  26. 138 0
      pyproject/zabbixAPItools/jvmthread_graph_creater.py
  27. 259 0
      pyproject/zabbixAPItools/zabbix_add_screen.py

+ 168 - 0
pyproject/AliyunTools/ECSMonotor/ECSMonitorApi.py

@@ -0,0 +1,168 @@
+# coding: utf-8
+
+import json
+import time
+from datetime import datetime, timezone
+from pprint import pprint
+from config import Config
+from word_generater import decodestr
+from influxdb import InfluxDBClient
+from aliyunsdkcore.client import AcsClient
+from aliyunsdkcore.acs_exception.exceptions import ClientException
+from aliyunsdkcore.acs_exception.exceptions import ServerException
+from aliyunsdkcms.request.v20190101.DescribeMetricLastRequest import DescribeMetricLastRequest
+from aliyunsdkecs.request.v20140526.DescribeInstancesRequest import DescribeInstancesRequest
+
+
+class ECSData2InfluxDB():
+    # Pull the latest per-instance ECS metrics from Aliyun CloudMonitor and
+    # write them into a local InfluxDB ('aliyun' database on 127.0.0.1:8086).
+    # NOTE(review): API credentials are decoded via word_generater.decodestr;
+    # the InfluxDB login is hard-coded to root/root -- confirm intentional.
+    def __init__(self, region):
+        # region: Aliyun region id, e.g. "cn-beijing"
+        self.region = region
+        self.client = AcsClient(decodestr(Config.accessKeyId), decodestr(
+            Config.accessSecret), self.region)
+        self.influxclient = InfluxDBClient(
+            '127.0.0.1', 8086, 'root', 'root', 'aliyun')
+
+    @property
+    def doAaction(self):
+        # Execute the currently prepared self.request and return the reply
+        # decoded from JSON.  (Name kept as-is; every file in this commit
+        # references "doAaction".)
+        self.request.set_accept_format('json')
+        response = self.client.do_action_with_exception(self.request)
+        return json.loads(str(response, encoding='utf-8'))
+
+    ''' data['Datapoints']
+     {'Average': 0,
+  'Maximum': 0,
+  'Minimum': 0,
+  'instanceId': 'i-2ze551ld3zvhp56r24vy',
+  'timestamp': 1575360720000,
+  'userId': '1655840174173842'},
+ {'Average': 0,
+  'Maximum': 0,
+  'Minimum': 0,
+  'instanceId': 'i-2ze551ld3zvhp56r24uw',
+  'timestamp': 1575360720000,
+  'userId': '1655840174173842'}]
+    '''
+    @property
+    def DescribeECSInternetOutRate(self):
+        # Fetch the last datapoint of metric self.action for every instance
+        # in the account, grouped by instanceId, via DescribeMetricLast.
+        self.request = DescribeMetricLastRequest()
+        self.request.set_MetricName(self.action)
+        self.request.set_Period("60")
+        self.request.set_Namespace("acs_ecs_dashboard")
+        self.request.set_Express("{\"groupby\":[\"instanceId\"]}")
+        self.request.set_Length("10000")
+        data = self.doAaction
+        # with open("DescribeECSInternetOutRate.log", 'w') as f:
+        #     f.write(str(data))
+        # NOTE(review): 'Datapoints' arrives as a Python-literal string and
+        # is eval()'d here -- ast.literal_eval/json.loads would be safer.
+        return eval(data.get('Datapoints'))
+
+    def DescribeInstances(self, InstanceIds):
+        # Resolve up to 100 instance ids to their IP/charge-type details.
+        # Returns {InstanceId: {PublicIpAddress, PrivateIpAddress,
+        #                       InstanceChargeType, InstanceId}}.
+        self.request = DescribeInstancesRequest()
+        self.request.set_PageSize(100)
+        # InstanceIds ["i-2ze551ld3zvhp56r2504"]
+        self.request.set_InstanceIds(InstanceIds)
+        Data = self.doAaction
+        InstancesInfo = {}
+        for instance in Data['Instances']["Instance"]:
+            # pprint(instance)
+            # VPC instances expose the primary private IP via
+            # NetworkInterfaces; otherwise fall back to InnerIpAddress.
+            if instance.get("NetworkInterfaces"):
+                PrivateIpAddress = instance['NetworkInterfaces']["NetworkInterface"][0]["PrimaryIpAddress"]
+            else:
+                PrivateIpAddress = instance["InnerIpAddress"]["IpAddress"][0]
+            if instance.get('PublicIpAddress') and len(instance.get('PublicIpAddress')) > 1:
+                PublicIpAddress = instance["PublicIpAddress"]["IpAddress"][0]
+            elif instance.get('EipAddress'):
+                # EipAddress.IpAddress may be a bare string or a list.
+                IpAddress = instance["EipAddress"]["IpAddress"]
+                if isinstance(IpAddress, str):
+                    PublicIpAddress = instance["EipAddress"]["IpAddress"]
+                else:
+                    PublicIpAddress = instance["EipAddress"]["IpAddress"][0]
+            else:
+                PublicIpAddress = ""
+            #PublicIpAddress = instance["PublicIpAddress"]["IpAddress"][0]
+            InstanceId = instance["InstanceId"]
+            InstanceChargeType = instance["InstanceChargeType"]
+            InstancesInfo[InstanceId] = {"PublicIpAddress": PublicIpAddress, "PrivateIpAddress": PrivateIpAddress,
+                                         "InstanceChargeType": InstanceChargeType, 'InstanceId': InstanceId}
+            # InstancesInfo.append({'InstanceId':InstanceId, "PublicIpAddress": PublicIpAddress, "PrivateIpAddress":PrivateIpAddress, "InstanceChargeType":InstanceChargeType})
+        return InstancesInfo
+
+    @property
+    def RearrangeData(self):
+        # Merge the metric datapoints with per-instance IP info; instance-id
+        # lookups are batched in chunks of 100 (DescribeInstances page size).
+        ecsdatalist = self.DescribeECSInternetOutRate
+        ecsdatadict = {}
+        for data in ecsdatalist:
+            ecsdatadict[data['instanceId']] = data
+        instanceids = [x['instanceId'] for x in ecsdatalist]
+        instanceids = [instanceids[i:i+100]
+                       for i in range(0, len(instanceids), 100)]
+        for arr in instanceids:
+            Ipinfos = self.DescribeInstances(arr)
+            for data in Ipinfos.keys():
+                ecsdatadict[data]['PrivateIpAddress'] = Ipinfos[data]['PrivateIpAddress']
+                ecsdatadict[data]['PublicIpAddress'] = Ipinfos[data]['PublicIpAddress']
+                #instanceid = data['instanceId']
+                # print(instanceid)
+                #PrivateIpAddress = Ipinfos[instanceid]['PrivateIpAddress']
+                #PublicIpAddress = Ipinfos[instanceid]['PublicIpAddress']
+                #data['PrivateIpAddress'] = PrivateIpAddress
+                #data['PublicIpAddress'] = PublicIpAddress
+        return ecsdatadict
+
+    @property
+    def Save2Influx(self):
+        # Fetch+merge one metric and write it; returns write_points() result.
+        data = self.RearrangeData
+        influx_jsons = self.GenInfluxJson(data)
+        # pprint(influx_jsons)
+        res = self.influxclient.write_points(influx_jsons)
+        return res
+    
+    def GenInfluxJson(self, data):
+        # Convert merged datapoints into influxdb-python write_points bodies.
+        # n adds a growing sub-second offset so rows sharing Aliyun's
+        # per-minute timestamp are not overwritten by InfluxDB (which keys
+        # points on time + tags -- see the module README).
+        influx_jsons = []
+        table = self.action
+        n = 0.01
+        if self.action == "VPC_PublicIP_InternetOutRate":
+            # Public-bandwidth metric: only instances with a public IP.
+            for k, v in data.items():
+                if v.get("PublicIpAddress"):
+                    timestamp = int(str(v['timestamp'])[:10])
+                    # skip offsets ending in 9, presumably to avoid float
+                    # rounding onto the next second -- TODO confirm
+                    if int(str(timestamp+n)[-1]) == 9:
+                        n += 0.02
+                    jsonbody = {"measurement": table,
+                            "time": datetime.fromtimestamp(timestamp+n, timezone.utc).isoformat("T"),
+                            "tags": {"PrivateIpAddress": v["PrivateIpAddress"]
+                                    },
+                            "fields": {"value": int(v['Average'])
+                                    }
+                            }
+                    n += 0.01
+                    influx_jsons.append(jsonbody)
+#        if self.action == "cpu_total" or self.action == "IntranetOutRate":
+        else:
+            # Other metrics: only instances on the 10.0.* private network.
+            for k, v in data.items():
+                if v.get("PrivateIpAddress") and "10.0." in v.get("PrivateIpAddress"):
+                    timestamp = int(str(v['timestamp'])[:10])
+                    if int(str(timestamp+n)[-1]) == 9:
+                        n += 0.02
+                    jsonbody = {"measurement": table,
+                            "time": datetime.fromtimestamp(timestamp+n, timezone.utc).isoformat("T"),
+                            "tags": {"PrivateIpAddress": v["PrivateIpAddress"]
+                                    },
+                            "fields": {"value": int(v['Average'])
+                                }
+                        }
+                    n += 0.01
+                    influx_jsons.append(jsonbody)
+        return influx_jsons
+
+    def Run(self):
+        # Push each supported CloudMonitor metric into its own measurement.
+        actions = ["VPC_PublicIP_InternetOutRate", "IntranetOutRate", "cpu_total", "memory_usedutilization", "net_tcpconnection","load_5m"]
+        for action in actions:
+            self.action = action
+            self.Save2Influx
+
+if __name__ == "__main__":
+
+    # Poll forever: refresh every metric into InfluxDB, then sleep 30s.
+    while 1:
+        ins = ECSData2InfluxDB("cn-beijing")
+        ins.Run()
+        time.sleep(30)
+

+ 3 - 0
pyproject/AliyunTools/ECSMonotor/config.py

@@ -0,0 +1,3 @@
+class Config():
+    # Aliyun API credential holder; values are filled in per deployment in
+    # the obfuscated form understood by word_generater.decodestr.
+    accessKeyId = ""
+    accessSecret = ""

+ 58 - 0
pyproject/AliyunTools/README.md

@@ -0,0 +1,58 @@
+# ECSCreater
+## 创建
+- 地址:http://60.205.208.168:5000/createecs
+- HTTP Method: POST
+- Parameter:
+  - set_Region:cn-beijing      // 大区
+  - set_InternetChargeType: // 网络计费方式         PayByTraffic // 流量按量计费           PayByBandwidth // 按固定带宽计费
+  - set_InstanceChargeType   // ECS计费方式         PrePaid // 包年包月,预设为1个月,自动续费           PostPaid   //  按量计费
+  - set_Password                    // 设置服务器密码
+  - set_Amount                       // 一次创建的数量
+- response:返回创建的实例ID
+
+## 获取实例信息,调用创建接口10s后调用
+- 地址:http://60.205.208.168:5000/describeinstances
+- HTTP Method: POST
+- Parameter:
+  - set_Region:cn-beijing      // 大区
+  - InstanceIds: ["i-2ze7me6g517mxsdknmx2","i-2ze7me6g517mxsdknlki"]  // 实例ID,单次获取上限为100,超过需要翻页,暂时未做,该接口较快,可以多次调用
+- response:  返回 实例ID,付费类型、公网地址、内网地址
+
+## 释放实例
+- 地址:http://60.205.208.168:5000/releaseinstances
+- HTTP Method: POST
+- Parameter:
+  -  set_Region:cn-beijing      // 大区
+  -  InstanceIds: ["i-2ze7me6g517mxsdknmx2","i-2ze7me6g517mxsdknlki"] // 需要释放的实例id数组
+- response: 阿里云request id,没有错误即为成功;因阿里云不支持包年包月的ECS直接释放,如有包年包月类型的释放请求,会先调用计费模式转换,返回会稍慢
+
+
+# EIPChanger
+##  根据内网ip获取实例id
+- 地址: http://60.205.208.168:5000/getinstanceid
+- HTTP Method: POST
+- Parameter:
+  - privateip  // ECS内网ip
+  - region     // 大区
+- response:返回IP和实例ID键值对
+
+## 根据内网IP更换EIP
+- 地址: http://60.205.208.168:5000/reseteip
+- HTTP Method: POST
+- Parameter:
+  - privateip // ECS内网ip
+  - region    // 大区
+- response:返回新EIP,老EIP和完成状态
+
+# ECSMonitorApi
+## 阿里云主机自动聚合监控
+阿里云监控不提供自动的监控项聚合,ECS增删的情况下,需要不断维护各个监控项dashboard
+通过调研阿里云云监控api,发现其提供了获取账号下所有ECS各个监控子项的聚合数据;如果通过接口获取数据,那么就不需要维护监控主机列表了,相对就是做到了主机的自动发现
+该需求主要用于监控数据查找和展示,监控告警则仍然由阿里云监控模板统一匹配和实现
+实现:
+ - 安装一套influxdb用于数据落地
+ - 通过阿里云api获取所有阿里云账户下的所有ECS监控数据
+ - 通过返回的instanceid获取ECS的ip信息,将时间、ip信息和返回的最新监控数值写入influxdb
+ - 因为阿里云接口返回的数据很多有相同的时间戳,influxdb时间作为主键,插入进去数据会被覆盖,需要为时间戳加上随机毫秒单位
+ - 安装一套grafana,数据源为influxdb,设置两个variable,分别是所有公网ip和所有内网ip
+ - 阿里云DescribeMetricLast接口里MetricName即监控项可选参数 [云产品指标说明](https://cloudmonitor.console.aliyun.com/?spm=a2c4g.11186623.2.59.7254ad41IqwKxh#/metricdoc)

+ 1 - 0
pyproject/AliyunTools/aliyun_eipaddress/.python-version

@@ -0,0 +1 @@
+3.7.3

+ 153 - 0
pyproject/AliyunTools/aliyun_eipaddress/ECSCreater.py

@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+#coding=utf-8
+import json
+import uuid
+from config_hbgj import Config
+from word_generater import decodestr
+from aliyunsdkcore.client import AcsClient
+from aliyunsdkcore.acs_exception.exceptions import ClientException
+from aliyunsdkcore.acs_exception.exceptions import ServerException
+from aliyunsdkecs.request.v20140526.CreateInstanceRequest import CreateInstanceRequest
+from aliyunsdkecs.request.v20140526.RunInstancesRequest import RunInstancesRequest
+from aliyunsdkecs.request.v20140526.DescribeInstancesRequest import DescribeInstancesRequest
+from aliyunsdkecs.request.v20140526.ModifyInstanceChargeTypeRequest import ModifyInstanceChargeTypeRequest
+from aliyunsdkecs.request.v20140526.DeleteInstancesRequest import DeleteInstancesRequest
+
+class AliyunECS(object):
+    # Create, describe and release fixed-shape "proxy" ECS instances
+    # (ecs.t5-lc1m1.small, fixed image/disk/vswitch) in one Aliyun region.
+    # if user=gtgj, only allowed ecs.t5-lc1m1.small for ecs type
+    # set_DryRun(True) for debug
+    # release instance api ModifyInstanceAutoReleaseTime   https://help.aliyun.com/document_detail/47576.html?spm=a2c4g.11186623.6.1191.66777c25Y5tDMN
+
+    # request.set_ImageId("m-2ze9rkkcv9k4mi693nob")
+    # request.set_InstanceType("ecs.t5-lc1m1.small")
+    # request.set_InstanceName("proxy")
+    # request.set_InternetChargeType("PayByTraffic")
+    # request.set_InternetMaxBandwidthOut(1)
+    # request.set_HostName("porxy")
+    # request.set_Password("password3324!")
+    # request.set_SystemDiskSize(40)
+    # request.set_SystemDiskCategory("cloud_efficiency")
+    # request.set_Description("proxy")
+    # request.set_VSwitchId("vsw-2zeay6fvk4kq6dtvyk8nh")
+    # request.set_InstanceChargeType("PostPaid") 
+    """
+    {
+    	"InstanceId": "i-2zeat29qpyvlcfh6p17v",
+    	"RequestId": "6D3573AF-B8BC-4679-BA62-F1CBDB25CC7D"
+    }
+    """
+
+
+    '''
+        for choice argments:
+        InternetChargeType,  if instanceChargeType==Prepaid then InternetChargeType=PayByBandwidth else PayByTraffic
+        InstanceChargeType # if InstanceChargeType==PrePaid then PeriodUnit=Month or InstanceChargeType=PostPaid
+        InstanceCount
+        Password
+    '''
+
+    '''static argments:
+        ImageId
+        InstanceType
+        InstanceName
+        InternetMaxBandwidthOut
+        HostName
+        SystemDiskSize
+        SystemDiskCategory
+        Description
+        VSwitchId
+    '''
+    # {'RequestId': 'F67D0AE3-8B4A-4A06-88E1-AD037B8D2F3B', 'InstanceIdSets': {'InstanceIdSet': ['i-2ze7anzznp9keos3mzo2', 'i-2ze7anzznp9keos3mzo3']}, 'TradePrice': 53.2} 
+    def __init__(self, user="gtgj", **CreateInfo):
+        # CreateInfo: set_* kwargs later forwarded onto the Aliyun request;
+        # the fixed instance-shape values below are forced, so callers only
+        # choose region / charge types / password / amount.
+        
+        # NOTE(review): metadata is never used.
+        metadata = ["RegionId", "AutoReleaseTime", "InstanceCount"]
+        self.CreateInfo = CreateInfo
+        self.set_Region = self.CreateInfo.pop("set_Region")
+        self.CreateInfo['set_InstanceType'] = "ecs.t5-lc1m1.small"
+        self.CreateInfo['set_ImageId'] = "m-2ze9rkkcv9k4mi693nob"
+        self.CreateInfo['set_InstanceName'] = "proxy"
+        self.CreateInfo['set_InternetMaxBandwidthOut'] = 1
+        self.CreateInfo['set_HostName'] = "proxy"
+        self.CreateInfo['set_Description'] = "proxy"
+        self.CreateInfo['set_SecurityGroupId'] = "sg-2ze4a0k1v1gsidwkez2c"
+        self.CreateInfo['set_SystemDiskSize'] = 40
+        self.CreateInfo['set_SystemDiskCategory'] = "cloud_efficiency"
+        self.CreateInfo['set_VSwitchId'] = "vsw-2zeay6fvk4kq6dtvyk8nh"
+        # if self.CreateInfo.get("set_InternetChargeType") == "Prepaid" and self.CreateInfo.get("set_InternetChargeType") == "PayByBandwidth" :
+        if self.CreateInfo.get("set_InstanceChargeType") == "PrePaid":
+             # Subscription instances: one-month term with auto-renew.
+             self.CreateInfo['set_PeriodUnit'] = "Month"
+             self.CreateInfo['set_Period'] = 1
+             self.CreateInfo['set_AutoRenewPeriod'] = 1
+             self.CreateInfo['set_AutoRenew'] = True
+
+             self.set_PayType = 1 # 1. PayByBandwidth  2.PayByTraffic
+             
+        else:
+             # NOTE(review): attribute name differs from the PrePaid branch
+             # (set_PayType vs PayType) -- looks like a typo; neither is
+             # read anywhere in this file.
+             self.PayType = 0 
+        self.client = AcsClient(decodestr(Config.accessKeyId), decodestr(Config.accessSecret), self.set_Region)
+
+    @property 
+    def doAaction(self):
+        # Execute the prepared self.request, return the JSON-decoded reply.
+        self.request.set_accept_format('json')
+        response = self.client.do_action_with_exception(self.request)
+        return json.loads(str(response, encoding='utf-8')) 
+
+    @property
+    def CreateECSInstance(self):
+        # Build a RunInstances request by exec()-ing one setter call per
+        # set_* kwarg.
+        # NOTE(review): exec on interpolated values is injection-prone when
+        # CreateInfo comes from HTTP form data (see index.py) -- prefer
+        # getattr(self.request, k)(v).
+        self.request = RunInstancesRequest()
+        # NOTE(review): key "Password" is never passed by callers (they use
+        # "set_Password"), so ecs_password is always None and unused.
+        ecs_password = self.CreateInfo.get("Password")
+        for k,v in self.CreateInfo.items():
+            print("""self.request.%s("%s") """ % (k, v))
+            exec("""self.request.%s("%s") """ % (k, v))
+        self.request.set_ClientToken(uuid.uuid1())
+        return self.doAaction
+
+    # https://api.aliyun.com/?spm=a2c4g.11186623.2.9.78953290C6BExH#/?product=Ecs&api=DescribeInstances&params={}&tab=DEMO&lang=PYTHON
+    def DescribeInstances(self, InstanceIds):
+        # Resolve up to 100 instance ids to IP/charge-type info.
+        # NOTE(review): unlike the ECSMonitor variant, this assumes every
+        # instance is in a VPC and has a public IP -- confirm for callers.
+        self.request = DescribeInstancesRequest()
+        self.request.set_PageSize(100)
+        # InstanceIds ["i-2ze551ld3zvhp56r2504"]
+        self.request.set_InstanceIds(InstanceIds)
+        Data = self.doAaction
+        InstancesInfo = []
+        for instance in  Data['Instances']["Instance"]:
+            PrivateIpAddress = instance['NetworkInterfaces']["NetworkInterface"][0]["PrimaryIpAddress"]
+            PublicIpAddress = instance["PublicIpAddress"]["IpAddress"][0]
+            InstanceId = instance["InstanceId"]
+            InstanceChargeType = instance["InstanceChargeType"]
+            InstancesInfo.append({'InstanceId':InstanceId, "PublicIpAddress": PublicIpAddress, "PrivateIpAddress":PrivateIpAddress, "InstanceChargeType":InstanceChargeType})
+        return InstancesInfo
+       
+    def ReleaseInstances(self, InstanceIds):
+        # Force-delete instances; PrePaid ones are first converted to
+        # PostPaid because Aliyun refuses to release subscription instances.
+        InstancesInfo = self.DescribeInstances(InstanceIds)
+        PrePaidInstanceIds = [instance['InstanceId'] for instance in InstancesInfo if instance['InstanceChargeType'] == "PrePaid"]
+        if PrePaidInstanceIds:
+            self.PrePaid2PostPaid(PrePaidInstanceIds)
+        self.request = DeleteInstancesRequest()
+        self.request.set_Force(True)
+        self.request.set_InstanceIds(InstanceIds)
+        return self.doAaction
+        
+
+    def PrePaid2PostPaid(self, InstanceIds):
+        # Switch subscription (PrePaid) instances to pay-as-you-go.
+        self.request = ModifyInstanceChargeTypeRequest()
+        self.request.set_InstanceChargeType("PostPaid")
+        self.request.set_InstanceIds(InstanceIds)
+        return self.doAaction
+         
+
+if __name__ == "__main__":
+    # Ad-hoc smoke test; the commented variants show create/describe usage.
+    #instance =  AliyunECS(set_Region="cn-beijing", set_InternetChargeType="PayByTraffic", set_InstanceChargeType="PostPaid", set_Password="Pass@123!", set_Amount=2)
+    #instance =  AliyunECS(set_Region="cn-beijing", set_InternetChargeType="PayByBandwidth", set_InstanceChargeType="PrePaid", set_Password="Pass@123!", set_Amount=2)
+    #print(instance.CreateECSInstance)
+    
+    #instance =  AliyunECS(set_Region="cn-beijing")
+    #print(instance.DescribeInstances(["i-2ze551ld3zvhp56r2504", "i-2ze551ld3zvhp56r250e"]))
+    
+    instance =  AliyunECS(set_Region="cn-beijing")
+    print(instance.ReleaseInstances(["i-2zefwbdpum6qmsl9yx6x","i-2zefwbdpum6qmsl9yx6y"]))       
+
+#    def ModifyECSInstance():
+#
+#    def Run():
+#        self.CreateInfo.get("InstancesCount") 

+ 131 - 0
pyproject/AliyunTools/aliyun_eipaddress/EIPChanger.py

@@ -0,0 +1,131 @@
+#coding=utf-8
+
+import json
+from config import Config
+from word_generater import decodestr
+from aliyunsdkcore.client import AcsClient
+from aliyunsdkcore.acs_exception.exceptions import ClientException
+from aliyunsdkcore.acs_exception.exceptions import ServerException
+from aliyunsdkvpc.request.v20160428.AllocateEipAddressRequest import AllocateEipAddressRequest
+from aliyunsdkvpc.request.v20160428.DescribeEipAddressesRequest import DescribeEipAddressesRequest
+from aliyunsdkvpc.request.v20160428.UnassociateEipAddressRequest import UnassociateEipAddressRequest
+from aliyunsdkvpc.request.v20160428.AssociateEipAddressRequest import AssociateEipAddressRequest
+from aliyunsdkvpc.request.v20160428.ReleaseEipAddressRequest import ReleaseEipAddressRequest
+from aliyunsdkvpc.request.v20160428.ModifyEipAddressAttributeRequest import ModifyEipAddressAttributeRequest
+import aliyunsdkcore
+import time
+
+
+
+
+class AliyunEip(object):
+    # Manage the Elastic IP bound to one ECS instance: describe, detach,
+    # release, allocate and re-attach, so run() leaves a fresh public IP.
+ 
+    def __init__(self, instanceid, region, *eipid):
+        # instanceid: ECS instance id; region: Aliyun region id;
+        # eipid: optional current EIP allocation id (varargs; only the first
+        # value is used, 0 means "no EIP currently known").
+        self.client = AcsClient(decodestr(Config.accessKeyId), decodestr(Config.accessSecret), region)
+        self.instanceid = instanceid 
+        if eipid:
+            self.eipid = eipid[0]
+        else:
+            self.eipid = 0
+        self.bandwidth = "1"
+        
+    # allocate a new pay-by-traffic EIP (1 Mbit/s)
+    def allocateEip(self):
+        self.request = AllocateEipAddressRequest()
+        self.request.set_Bandwidth(self.bandwidth)
+        self.request.set_InternetChargeType("PayByTraffic")
+        return self.doAaction 
+          
+    # describe the EIP identified by self.eipid (None when no eipid is set)
+    def describeEip(self):
+        self.request = DescribeEipAddressesRequest()
+        if self.eipid:
+            self.request.set_AllocationId(self.eipid)
+            return self.doAaction
+        return
+    
+    # detach the EIP from the ECS instance
+    def unassociateEip(self):
+        self.request = UnassociateEipAddressRequest()
+        self.request.set_AllocationId(self.eipid)
+        self.request.set_InstanceId(self.instanceid)
+        return self.doAaction
+    
+    # bind the EIP to the ECS instance
+    def associateEip(self):
+        self.request = AssociateEipAddressRequest()
+        self.request.set_AllocationId(self.eipid)
+        self.request.set_InstanceId(self.instanceid)
+        return self.doAaction
+    
+    # release (delete) the EIP
+    def releaseEip(self):
+        self.request = ReleaseEipAddressRequest()
+        self.request.set_AllocationId(self.eipid)
+        return self.doAaction
+    
+    # rename the EIP
+    # test EIP name GTGJEIP
+    # NOTE(review): reads self.eipname, which nothing ever assigns -- calling
+    # this as-is raises AttributeError (its only call site is commented out).
+    def modifyEip(self):
+        self.request = ModifyEipAddressAttributeRequest()
+        self.request.set_Name(self.eipname)
+        self.request.set_AllocationId(self.eipid)
+        return self.doAaction
+
+    @property 
+    def doAaction(self):
+        # Execute the prepared self.request, return the JSON-decoded reply.
+        self.request.set_accept_format('json')
+        response = self.client.do_action_with_exception(self.request)
+        return json.loads(str(response, encoding='utf-8'))
+    
+    def eipInfo(self):
+        # Summarize the current EIP: {'IpAddress': ..., 'isbonding': 0|1}.
+        # Side effect: refreshes self.eipid from the describe result.
+        eipinfo = self.describeEip()
+        d =  {}
+        if eipinfo:
+            for eip in eipinfo['EipAddresses']['EipAddress']:
+                self.eipid = eip['AllocationId']
+                ipaddr = eip['IpAddress']
+                d['IpAddress'] = ipaddr
+                if eip['InstanceType'] == 'EcsInstance':
+                    d['isbonding'] = 1
+                else:
+                    d['isbonding'] = 0
+        return d
+    
+    def run(self):
+        # Full rotation: detach + release the old EIP (waiting for the
+        # detach to take effect), then allocate and bind a new one.
+        try: 
+            eipinfo = self.eipInfo()
+            sourceip = eipinfo.get('IpAddress')
+            if  eipinfo.get('IpAddress'):
+                if eipinfo['isbonding']:
+                    self.unassociateEip()
+                isbond = self.eipInfo().get('isbonding')
+                while isbond:
+                    isbond = self.eipInfo().get('isbonding')
+                    time.sleep(0.5)              
+                self.releaseEip()
+            # NOTE(review): eipInfo() only has keys 'IpAddress'/'isbonding',
+            # so .get(self.eipid) is always None and this wait loop never
+            # spins -- confirm the intended key.
+            iseipexists = self.eipInfo().get(self.eipid)
+            while iseipexists:
+                iseipexists = self.eipInfo().get(self.eipid)
+                time.sleep(0.5)
+            neweip = self.allocateEip()
+            self.eipid = neweip["AllocationId"]
+            #self.modifyEip() 
+            self.associateEip()    
+            eipinfo = self.eipInfo()
+            _neweip = eipinfo['IpAddress']
+            print("replace ip success.")
+            return {"status":"success", "sourceip": sourceip, "neweip": _neweip}
+        except aliyunsdkcore.acs_exception.exceptions.ServerException as  e:
+            return {"ERROR": str(e)}
+       # except:
+       #     return  {"status":"false"} 
+         
+         
+#print(AliyunEip('cn-shenzhen').unassociateEip())
+if __name__ == "__main__":
+    # Manual test: rotate the EIP of one instance in cn-hangzhou.
+    #instance = (AliyunEip('i-wz9dzuvasloqyryn6xod', "cn-hangzhou", 'eip-wz94nj3izkmakzuf3r254'))
+    instance = (AliyunEip('i-bp11ah3f4dtxjezay2oo', "cn-hangzhou", ''))
+    print(instance.run())
+
+    # print(instance.allocateEip())

+ 30 - 0
pyproject/AliyunTools/aliyun_eipaddress/InstanceID.py

@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#coding=utf-8
+from pprint import pprint
+import json
+from config import Config
+from word_generater import decodestr
+from aliyunsdkcore.client import AcsClient
+from aliyunsdkcore.acs_exception.exceptions import ClientException
+from aliyunsdkcore.acs_exception.exceptions import ServerException
+from aliyunsdkecs.request.v20140526.DescribeInstancesRequest import DescribeInstancesRequest
+
+
+def privateIP2InstanceID(ip,region):
+    # Map an ECS private IP to its instance id (plus the bound EIP's
+    # allocation id, when one exists).
+    # Returns {'instanceid': str} or {'instanceid': str, 'eipid': str}.
+    # NOTE(review): assumes at least one instance matches the IP; raises
+    # IndexError when none does -- confirm callers handle that.
+    d = {}
+    client = AcsClient(decodestr(Config.accessKeyId), decodestr(Config.accessSecret), region)
+    request = DescribeInstancesRequest()
+    request.set_accept_format('json')
+    request.set_PrivateIpAddresses([ip])
+    response = client.do_action_with_exception(request)
+    result = json.loads(str(response, encoding='utf-8'))
+    eipinfo = result['Instances']["Instance"][0].get('EipAddress')
+    if eipinfo:
+        d['eipid'] = eipinfo['AllocationId']
+    d['instanceid'] = result['Instances']["Instance"][0]["InstanceId"]
+    return d
+
+
+if __name__ == "__main__":
+    #print(privateIP2InstanceID("172.18.135.107"))
+    print(privateIP2InstanceID("172.16.181.135","cn-hangzhou"))

BIN
pyproject/AliyunTools/aliyun_eipaddress/__pycache__/EIPChanger.cpython-37.pyc


BIN
pyproject/AliyunTools/aliyun_eipaddress/__pycache__/InstanceID.cpython-37.pyc


BIN
pyproject/AliyunTools/aliyun_eipaddress/__pycache__/config.cpython-37.pyc


BIN
pyproject/AliyunTools/aliyun_eipaddress/__pycache__/index.cpython-37.pyc


BIN
pyproject/AliyunTools/aliyun_eipaddress/__pycache__/word_generater.cpython-37.pyc


+ 78 - 0
pyproject/AliyunTools/aliyun_eipaddress/call/interfacecaller.py

@@ -0,0 +1,78 @@
+import requests
+
+# Batch driver: call the /reseteip service once per private-IP/region pair,
+# rotating the EIP of every listed instance sequentially.
+url = "http://60.205.208.168:5000/reseteip"
+
+# {private IP: region} of every instance whose EIP should be replaced.
+instances = {"172.16.181.135":"cn-hangzhou",
+             "172.16.181.67":"cn-hangzhou",
+             "172.16.181.52":"cn-hangzhou",
+             "172.16.181.68":"cn-hangzhou",
+             "172.16.181.49":"cn-hangzhou",
+             "172.16.181.50":"cn-hangzhou",
+             "172.16.181.110":"cn-hangzhou",
+             "172.16.181.38":"cn-hangzhou",
+             "172.16.181.113":"cn-hangzhou",
+             "172.16.181.40":"cn-hangzhou",
+             "172.16.181.42":"cn-hangzhou",
+             "172.16.181.44":"cn-hangzhou",
+             "172.16.181.115":"cn-hangzhou",
+             "172.16.181.111":"cn-hangzhou",
+             "172.16.181.45":"cn-hangzhou",
+             "172.16.181.43":"cn-hangzhou",
+             "172.16.181.39":"cn-hangzhou",
+             "172.16.181.41":"cn-hangzhou",
+             "172.31.159.115":"cn-qingdao",
+             "172.31.159.140":"cn-qingdao",
+             "172.31.159.112":"cn-qingdao",
+             "172.31.159.132":"cn-qingdao",
+             "172.31.159.116":"cn-qingdao",
+             "172.31.159.101":"cn-qingdao",
+             "172.31.159.176":"cn-qingdao",
+             "172.31.159.228":"cn-qingdao",
+             "172.31.159.109":"cn-qingdao",
+             "172.31.159.130":"cn-qingdao",
+             "172.31.159.118":"cn-qingdao",
+             "172.31.159.149":"cn-qingdao",
+             "172.31.159.100":"cn-qingdao",
+             "172.31.159.117":"cn-qingdao",
+             "172.31.159.128":"cn-qingdao",
+             "172.31.159.167":"cn-qingdao",
+             "172.31.159.142":"cn-qingdao",
+             "172.31.159.114":"cn-qingdao",
+             "172.31.159.183":"cn-qingdao",
+             "172.31.159.104":"cn-qingdao",
+             "172.31.159.169":"cn-qingdao",
+             "172.31.159.171":"cn-qingdao",
+             "172.31.159.94":"cn-qingdao",
+             "172.31.159.170":"cn-qingdao",
+             "172.31.159.110":"cn-qingdao",
+             "172.31.159.126":"cn-qingdao",
+             "172.31.159.79":"cn-qingdao",
+             "172.31.159.119":"cn-qingdao",
+             "172.31.159.97":"cn-qingdao",
+             "172.31.159.90":"cn-qingdao",
+             "172.31.159.113":"cn-qingdao",
+             "172.31.159.144":"cn-qingdao",
+             "172.31.159.133":"cn-qingdao",
+             "172.31.159.148":"cn-qingdao",
+             "172.31.159.72":"cn-qingdao",
+             "172.31.159.65":"cn-qingdao",
+             "172.31.159.129":"cn-qingdao",
+             "172.31.159.83":"cn-qingdao",
+             "172.31.159.84":"cn-qingdao",
+             "172.31.159.106":"cn-qingdao",
+             "172.31.159.103":"cn-qingdao",
+             "172.31.159.123":"cn-qingdao",
+             "172.31.159.71":"cn-qingdao",
+             "172.31.159.105":"cn-qingdao",
+             "172.31.159.107":"cn-qingdao",
+             "172.31.159.82":"cn-qingdao",
+             "172.31.144.56":"cn-qingdao",
+             "172.31.159.111":"cn-qingdao",
+             "172.31.159.85":"cn-qingdao",
+             "172.31.159.63":"cn-qingdao"
+             }
+
+# One blocking POST per instance; prints the JSON result of each rotation.
+for ip,region in instances.items():
+    data = {"privateip": ip, "region": region}
+    r = requests.post(url, data)
+    print(r.text)

+ 3 - 0
pyproject/AliyunTools/aliyun_eipaddress/config.py

@@ -0,0 +1,3 @@
+class Config():
+    # Aliyun credentials stored obfuscated; decoded at runtime via
+    # word_generater.decodestr ("encodestr" is a placeholder value).
+    accessKeyId = "encodestr"
+    accessSecret = "encodestr"

+ 3 - 0
pyproject/AliyunTools/aliyun_eipaddress/config_hbgj.py

@@ -0,0 +1,3 @@
+class Config():
+    # NOTE(review): these look like real (obfuscated) account credentials
+    # committed to source control -- rotate the key pair and move secrets
+    # into environment/config outside the repository.
+    accessKeyId = "PFRBSWFK0NqaH8rxF6lnOA"
+    accessSecret = "Sb2xOHh0UTB1U2EzdVld6NjhrYUVhY22gAWHRn"

+ 17 - 0
pyproject/AliyunTools/aliyun_eipaddress/howtorun.txt

@@ -0,0 +1,17 @@
+# run command:
+gunicorn -w 4 -b :5000 index:app --access-logfile logs/access.log --error-logfile logs/error.log --log-level=debug --pid run/gunicorn.pid -D
+
+# /etc/logrotate.d/gunicorn
+
+/opt/aliyun_eipaddress/logs/*.log {
+daily
+rotate 30
+compress
+dateext
+dateformat .%Y-%m-%d
+notifempty
+sharedscripts
+postrotate
+    kill -USR1 $(cat /opt/aliyun_eipaddress/run/gunicorn.pid)
+endscript
+}

+ 75 - 0
pyproject/AliyunTools/aliyun_eipaddress/index.py

@@ -0,0 +1,75 @@
+# coding: utf-8
+from flask import Flask, request
+from flask import jsonify
+import json
+from InstanceID import privateIP2InstanceID
+from EIPChanger import AliyunEip
+from ECSCreater import AliyunECS
+import logging
+
+# App-level debug log; gunicorn keeps its own access/error logs (see
+# howtorun.txt).
+logging.basicConfig(level=logging.DEBUG,
+                    format='%(asctime)s %(levelname)s %(message)s',
+                    datefmt='%Y-%m-%d %H:%M:%S',
+                    filename='logs/debug.log',
+                    filemode='a')
+
+
+app = Flask(__name__)
+# NOTE(review): the README documents this endpoint as /getinstanceid;
+# "/instaceid" looks like a typo, but changing the route would break any
+# existing callers -- confirm before renaming.
+@app.route("/instaceid", methods=['POST'])
+def getinstanceid():
+    # Resolve a private IP (form fields: privateip, region) to instance/EIP ids.
+    privateip = request.form['privateip']
+    region=request.form['region']
+    result = privateIP2InstanceID(privateip, region)
+    return jsonify(result)
+
+
+@app.route("/reseteip", methods=['POST'])
+def reseteip():
+    # Rotate the EIP of the ECS that owns the given private IP
+    # (form fields: privateip, region); returns AliyunEip.run()'s payload.
+    # ""
+    #data = json.loads(request.get_data(as_text=True))
+    privateip = request.form['privateip']
+    region=request.form['region']
+    d = privateIP2InstanceID(privateip,region) 
+    instanceid = d['instanceid']
+    if d.get('eipid'):
+        eipid = d['eipid']
+        runner = AliyunEip(instanceid, region, eipid)
+    else:
+        runner = AliyunEip(instanceid, region)
+    data = runner.run()
+    return jsonify(data)
+
+@app.route("/createecs", methods=['POST'])
+def createecs():
+    # Create ECS instances from form fields.  NOTE(review): these values
+    # flow into AliyunECS.CreateECSInstance, which exec()s them -- sanitize
+    # before trusting external callers.
+    # set_Region
+    # set_InternetChargeType  PayByTraffic or PayByBandwidth
+    # set_InstanceChargeType  PostPaid     or PrePaid
+    # set_Password
+    # set_Amount
+    set_Region = request.form['set_Region']
+    set_InternetChargeType = request.form['set_InternetChargeType']
+    set_InstanceChargeType = request.form['set_InstanceChargeType']
+    set_Password = request.form['set_Password']
+    set_Amount = request.form['set_Amount']
+    instance = AliyunECS(set_Region=set_Region, set_InternetChargeType=set_InternetChargeType, set_InstanceChargeType=set_InstanceChargeType, set_Password=set_Password, set_Amount=set_Amount)
+    result = instance.CreateECSInstance
+    return jsonify(result)
+
+@app.route("/describeinstances", methods=['POST'])
+def describeinstances():
+    # Return IP/charge-type info for up to 100 instance ids.
+    set_Region = request.form['set_Region']
+    # NOTE(review): eval() on client-supplied form data is arbitrary code
+    # execution -- use json.loads instead.
+    InstanceIds = eval(request.form['InstanceIds'])
+    instance = AliyunECS(set_Region=set_Region)
+    result = instance.DescribeInstances(InstanceIds)
+    return jsonify(result)
+
+@app.route("/releaseinstances", methods=['POST'])
+def releaseinstances():
+    # Force-release the listed instances (PrePaid converted first).
+    set_Region = request.form['set_Region']
+    # NOTE(review): eval() on client-supplied form data is arbitrary code
+    # execution -- use json.loads instead.
+    InstanceIds = eval(request.form['InstanceIds'])
+    instance = AliyunECS(set_Region=set_Region)
+    result = instance.ReleaseInstances(InstanceIds)
+    return jsonify(result)
+
+if __name__ == "__main__":
+    # Dev server only; production runs under gunicorn (see howtorun.txt).
+    app.run(host="0.0.0.0")

+ 2 - 0
pyproject/AliyunTools/aliyun_eipaddress/logs/access.log

@@ -0,0 +1,2 @@
+27.17.10.210 - - [10/Sep/2019:14:25:09 +0800] "GET /reseteip HTTP/1.1" 405 178 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36"
+27.17.10.210 - - [10/Sep/2019:14:25:09 +0800] "GET /favicon.ico HTTP/1.1" 404 232 "http://60.205.208.168:5000/reseteip" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36"

+ 0 - 0
pyproject/AliyunTools/aliyun_eipaddress/logs/debug.log


+ 182 - 0
pyproject/AliyunTools/aliyun_eipaddress/logs/error.log

@@ -0,0 +1,182 @@
+[2019-09-10 14:21:43 +0800] [23303] [DEBUG] Current configuration:
+  config: None
+  bind: [':5000']
+  backlog: 2048
+  workers: 4
+  worker_class: sync
+  threads: 1
+  worker_connections: 1000
+  max_requests: 0
+  max_requests_jitter: 0
+  timeout: 30
+  graceful_timeout: 30
+  keepalive: 2
+  limit_request_line: 4094
+  limit_request_fields: 100
+  limit_request_field_size: 8190
+  reload: False
+  reload_engine: auto
+  reload_extra_files: []
+  spew: False
+  check_config: False
+  preload_app: False
+  sendfile: None
+  reuse_port: False
+  chdir: /opt/aliyun_eipaddress
+  daemon: True
+  raw_env: []
+  pidfile: None
+  worker_tmp_dir: None
+  user: 0
+  group: 0
+  umask: 0
+  initgroups: False
+  tmp_upload_dir: None
+  secure_scheme_headers: {'X-FORWARDED-PROTOCOL': 'ssl', 'X-FORWARDED-PROTO': 'https', 'X-FORWARDED-SSL': 'on'}
+  forwarded_allow_ips: ['127.0.0.1']
+  accesslog: logs/access.log
+  disable_redirect_access_to_syslog: False
+  access_log_format: %(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"
+  errorlog: logs/error.log
+  loglevel: debug
+  capture_output: False
+  logger_class: gunicorn.glogging.Logger
+  logconfig: None
+  logconfig_dict: {}
+  syslog_addr: udp://localhost:514
+  syslog: False
+  syslog_prefix: None
+  syslog_facility: user
+  enable_stdio_inheritance: False
+  statsd_host: None
+  statsd_prefix: 
+  proc_name: None
+  default_proc_name: index:app
+  pythonpath: None
+  paste: None
+  on_starting: <function OnStarting.on_starting at 0x7f52aa4b3950>
+  on_reload: <function OnReload.on_reload at 0x7f52aa4b3a60>
+  when_ready: <function WhenReady.when_ready at 0x7f52aa4b3b70>
+  pre_fork: <function Prefork.pre_fork at 0x7f52aa4b3c80>
+  post_fork: <function Postfork.post_fork at 0x7f52aa4b3d90>
+  post_worker_init: <function PostWorkerInit.post_worker_init at 0x7f52aa4b3ea0>
+  worker_int: <function WorkerInt.worker_int at 0x7f52aa226048>
+  worker_abort: <function WorkerAbort.worker_abort at 0x7f52aa226158>
+  pre_exec: <function PreExec.pre_exec at 0x7f52aa226268>
+  pre_request: <function PreRequest.pre_request at 0x7f52aa226378>
+  post_request: <function PostRequest.post_request at 0x7f52aa226400>
+  child_exit: <function ChildExit.child_exit at 0x7f52aa226510>
+  worker_exit: <function WorkerExit.worker_exit at 0x7f52aa226620>
+  nworkers_changed: <function NumWorkersChanged.nworkers_changed at 0x7f52aa226730>
+  on_exit: <function OnExit.on_exit at 0x7f52aa226840>
+  proxy_protocol: False
+  proxy_allow_ips: ['127.0.0.1']
+  keyfile: None
+  certfile: None
+  ssl_version: 2
+  cert_reqs: 0
+  ca_certs: None
+  suppress_ragged_eofs: True
+  do_handshake_on_connect: False
+  ciphers: TLSv1
+  raw_paste_global_conf: []
+[2019-09-10 14:21:43 +0800] [23303] [INFO] Starting gunicorn 19.9.0
+[2019-09-10 14:21:43 +0800] [23303] [DEBUG] Arbiter booted
+[2019-09-10 14:21:43 +0800] [23303] [INFO] Listening at: http://0.0.0.0:5000 (23303)
+[2019-09-10 14:21:43 +0800] [23303] [INFO] Using worker: sync
+[2019-09-10 14:21:43 +0800] [23309] [INFO] Booting worker with pid: 23309
+[2019-09-10 14:21:43 +0800] [23310] [INFO] Booting worker with pid: 23310
+[2019-09-10 14:21:43 +0800] [23311] [INFO] Booting worker with pid: 23311
+[2019-09-10 14:21:43 +0800] [23312] [INFO] Booting worker with pid: 23312
+[2019-09-10 14:21:43 +0800] [23303] [DEBUG] 4 workers
+[2019-09-10 14:25:09 +0800] [23311] [DEBUG] GET /reseteip
+[2019-09-10 14:25:09 +0800] [23309] [DEBUG] GET /favicon.ico
+[2019-09-10 14:27:36 +0800] [23725] [DEBUG] Current configuration:
+  config: None
+  bind: [':5000']
+  backlog: 2048
+  workers: 4
+  worker_class: sync
+  threads: 1
+  worker_connections: 1000
+  max_requests: 0
+  max_requests_jitter: 0
+  timeout: 30
+  graceful_timeout: 30
+  keepalive: 2
+  limit_request_line: 4094
+  limit_request_fields: 100
+  limit_request_field_size: 8190
+  reload: False
+  reload_engine: auto
+  reload_extra_files: []
+  spew: False
+  check_config: False
+  preload_app: False
+  sendfile: None
+  reuse_port: False
+  chdir: /opt/aliyun_eipaddress
+  daemon: True
+  raw_env: []
+  pidfile: run/gunicorn.pid
+  worker_tmp_dir: None
+  user: 0
+  group: 0
+  umask: 0
+  initgroups: False
+  tmp_upload_dir: None
+  secure_scheme_headers: {'X-FORWARDED-PROTOCOL': 'ssl', 'X-FORWARDED-PROTO': 'https', 'X-FORWARDED-SSL': 'on'}
+  forwarded_allow_ips: ['127.0.0.1']
+  accesslog: logs/access.log
+  disable_redirect_access_to_syslog: False
+  access_log_format: %(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"
+  errorlog: logs/error.log
+  loglevel: debug
+  capture_output: False
+  logger_class: gunicorn.glogging.Logger
+  logconfig: None
+  logconfig_dict: {}
+  syslog_addr: udp://localhost:514
+  syslog: False
+  syslog_prefix: None
+  syslog_facility: user
+  enable_stdio_inheritance: False
+  statsd_host: None
+  statsd_prefix: 
+  proc_name: None
+  default_proc_name: index:app
+  pythonpath: None
+  paste: None
+  on_starting: <function OnStarting.on_starting at 0x7ff18c551950>
+  on_reload: <function OnReload.on_reload at 0x7ff18c551a60>
+  when_ready: <function WhenReady.when_ready at 0x7ff18c551b70>
+  pre_fork: <function Prefork.pre_fork at 0x7ff18c551c80>
+  post_fork: <function Postfork.post_fork at 0x7ff18c551d90>
+  post_worker_init: <function PostWorkerInit.post_worker_init at 0x7ff18c551ea0>
+  worker_int: <function WorkerInt.worker_int at 0x7ff18c2c4048>
+  worker_abort: <function WorkerAbort.worker_abort at 0x7ff18c2c4158>
+  pre_exec: <function PreExec.pre_exec at 0x7ff18c2c4268>
+  pre_request: <function PreRequest.pre_request at 0x7ff18c2c4378>
+  post_request: <function PostRequest.post_request at 0x7ff18c2c4400>
+  child_exit: <function ChildExit.child_exit at 0x7ff18c2c4510>
+  worker_exit: <function WorkerExit.worker_exit at 0x7ff18c2c4620>
+  nworkers_changed: <function NumWorkersChanged.nworkers_changed at 0x7ff18c2c4730>
+  on_exit: <function OnExit.on_exit at 0x7ff18c2c4840>
+  proxy_protocol: False
+  proxy_allow_ips: ['127.0.0.1']
+  keyfile: None
+  certfile: None
+  ssl_version: 2
+  cert_reqs: 0
+  ca_certs: None
+  suppress_ragged_eofs: True
+  do_handshake_on_connect: False
+  ciphers: TLSv1
+  raw_paste_global_conf: []
+[2019-09-10 14:27:36 +0800] [23725] [INFO] Listening at: http://0.0.0.0:5000 (23725)
+[2019-09-10 14:27:36 +0800] [23725] [INFO] Using worker: sync
+[2019-09-10 14:27:36 +0800] [23735] [INFO] Booting worker with pid: 23735
+[2019-09-10 14:27:36 +0800] [23736] [INFO] Booting worker with pid: 23736
+[2019-09-10 14:27:36 +0800] [23741] [INFO] Booting worker with pid: 23741
+[2019-09-10 14:27:36 +0800] [23742] [INFO] Booting worker with pid: 23742
+[2019-09-10 14:27:36 +0800] [23725] [DEBUG] 4 workers

+ 1 - 0
pyproject/AliyunTools/aliyun_eipaddress/run/gunicorn.pid

@@ -0,0 +1 @@
+23725

+ 15 - 0
pyproject/AliyunTools/aliyun_eipaddress/word_generater.py

@@ -0,0 +1,15 @@
# coding: utf-8
+import base64
+import sys
+
def decodestr(s):
    """Strip the obfuscation junk from *s* and return the base64-decoded text.

    Layout of *s*: 5 junk characters up front plus an 8-character junk run
    occupying s[-12:-4]; everything else is unpadded base64.
    """
    junk = s[-12:-4]
    core = s[5:].replace(junk, '') + '=='
    return base64.b64decode(core.encode('utf-8')).decode('utf-8')
+
if __name__ == "__main__":
    if len(sys.argv) != 3:
        # BUG FIX: without exiting here, sys.argv[1] below raised IndexError
        # whenever the argument count was wrong.  The usage text also named
        # the function names instead of the accepted actions.
        print("usage: word_generater.py [decode]/[encode] [string]")
        sys.exit(1)
    if sys.argv[1] == "decode":
        print(decodestr(sys.argv[2]))
    elif sys.argv[1] == "encode":
        # BUG FIX: encodestr() was referenced but never defined anywhere in
        # this module, so this branch always crashed with NameError; fail
        # with a clear message instead.
        sys.exit("encode is not implemented: encodestr() is undefined")

+ 9 - 0
pyproject/Es2Mongo/README.md

@@ -0,0 +1,9 @@
+# 转储elasticsearch tracelog至mongodb
+
+# 环境要求:
+ - 扩展requirements: pymongo, redis, elasticsearch
+ - 本地安装redis-server, mongodb
+
+## usage:
+ - python transdata.py [int number]
+ - [int number]: 当前日期之前的任意天数,0为当日

+ 111 - 0
pyproject/Es2Mongo/esdata2mongo.py

@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+import time
+from datetime import datetime
+from elasticsearch import Elasticsearch
+from pprint import pprint
+import pymongo
+import redis
+import logging
+
+
+# # 创建对象时设置标志timeout来实现,如果已创建对象而未指定超时值,则可以通过在查询中使用request_timeout标志来设置特定请求的超时值
+# # curl -XPOST http://example.comr:9200/my_index/_close
+# # curl -XPOST http://example.comr:9200/my_index/_open
+
+# # logstash-trace-2019.06.10.08
+
+# res = es.search(index='logstash-trace-2019.06.10.08', scroll = '2m', size = 5)  #scroll = '2m'游标保持2分钟
+
+
+
class Es2Mongo(object):
    """Stream one hourly elasticsearch index towards mongodb via the scroll
    API, reporting progress into a redis hash.

    NOTE(review): ES/redis/mongo endpoints and credentials are hard-coded;
    they should come from configuration.
    NOTE(review): the actual mongo inserts are commented out below, so as
    written the class only scans the index and reports progress.
    """

    def __init__(self, index_name, entry_num):
        # index_name: e.g. "logstash-trace-2019.06.10.11" (one hourly index)
        # entry_num:  scroll batch size per request
        self.es = Elasticsearch(
            ['172.29.8.37'],
            http_auth=('admin', 'openet123qwe'),
            scheme="http",
            port=9200,
            timeout=60
        )
        self.mongo_client = pymongo.MongoClient('mongodb://localhost:27017/')
        self.redis_conn = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)
        self.index_name = index_name
        self.entry_num = entry_num
        #self.logger = logsetting(self.index_name)

    def DataWithScroll(self, entry_num, scroll='2h', *scroll_id):
        """Run the initial scroll search, or fetch the next batch when a
        scroll_id is given; the scroll context is kept alive for *scroll*."""
        if scroll_id:
            scroll_id = scroll_id[0]
            res = self.es.scroll(scroll_id=scroll_id, scroll=scroll)
        else:
            res = self.es.search(index=self.index_name,
                                 scroll=scroll, size=entry_num)
        return res

    def FormatJson(self, json_data):
        """Return the hit's _source, stringifying 'tags' when present."""
        _source = json_data['_source']
        if _source.get('tags'):
            _source['tags'] = str(_source['tags'])
        return _source

    def FormatDBName(self):
        """Derive (day-level, hour-level) names from the index name.

        "logstash-trace-2019.06.10.11" -> ("logstash-trace-2019-06-10",
                                           "logstash-trace-2019-06-10-11")
        Used as mongo db/collection names and as redis hash key/field.
        """
        date_name = '-'.join(self.index_name.split('.')[:-1])
        echo_hour_name = '-'.join(self.index_name.split('.'))
        return date_name, echo_hour_name

    def Log2Redis(self, value):
        """Store a progress value in the per-day redis hash for this index."""
        key, field = self.FormatDBName()
        self.redis_conn.hset(key, field, value)

    def InsertIntoMongo(self, josn_data):
        """Insert one document into the per-hour mongo collection."""
        dbname, collectionname = self.FormatDBName()
        db = self.mongo_client[dbname]
        collection = db[collectionname]
        collection.autoIndexId = False
        collection.insert_one(josn_data)

    def es2mongo(self, all):
        """Scroll through the whole index.  When *all* is truthy, keep
        fetching batches until exhausted, logging progress to redis roughly
        every 10-60 seconds and writing the sentinel value 1 on completion."""
        datas = self.DataWithScroll(self.entry_num)
        scroll_id = datas['_scroll_id']
        size_all = datas['hits']['total']
        scroll_size = 1
        for data in datas['hits']['hits']:
            data = self.FormatJson(data)
            #self.InsertIntoMongo(data)
        n = self.entry_num
        last_mess = 0
        last_data_count = n
        start_time = int(time.time())
        if all:
            while (scroll_size > 0):
                datas = self.DataWithScroll(self.entry_num, '2h', scroll_id)
                scroll_id = datas['_scroll_id']
                scroll_size = len(datas['hits']['hits'])
                n += self.entry_num
                unix_timestamp = int(time.time())
                # Throttled progress report: on a minute boundary (>1s since
                # the last report) or at most every ~10s.  Both guards keep
                # (unix_timestamp - last_mess) > 0, so the query-per-second
                # division below cannot divide by zero.
                if not unix_timestamp % 60 and (unix_timestamp - last_mess) > 1 or (unix_timestamp - last_mess) > 10:
                    self.Log2Redis("%s total:%s. left:%s.  time cost:%ss. query per second:%s, completed percent:%.3f%%" % (datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S"),
                                                                                                                                  size_all, size_all - n, unix_timestamp - start_time, (n - last_data_count) / (unix_timestamp - last_mess), float(n) * 100 / float(size_all)))
                    last_mess = unix_timestamp
                    last_data_count = n

                for data in datas['hits']['hits']:
                    data = self.FormatJson(data)
                    #self.InsertIntoMongo(data)
            # while/else: runs once the scroll is exhausted normally; the
            # value 1 marks this index as complete for the progress watcher.
            else:
                self.Log2Redis(1)
                pass
+
+
+
+# 5-570  1000-2600  2000-3000
+#Es2Mongo('logstash-trace-2019.06.10.11', 2000).es2mongo(all=True)

+ 84 - 0
pyproject/Es2Mongo/transdata.py

@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+from elasticsearch import Elasticsearch
+from esdata2mongo import Es2Mongo
+import multiprocessing
+import datetime
+import sys
+import redis
+import time
+import logging
+
+# settings
+redis_ip = "127.0.0.1"
+redis_port = 6379
+
def index_list(some_days_ago):
    """Return the 24 hourly 'logstash-trace' index names for a past day.

    some_days_ago -- number of days before today (0 = today).
    """
    day = (datetime.datetime.now()
           - datetime.timedelta(days=some_days_ago)).strftime("%Y.%m.%d")
    return ["logstash-trace-%s.%02d" % (day, hour) for hour in range(24)]
+
+
def logsetting(logname):
    """Return an INFO-level logger that appends to '<logname>.log'.

    BUG FIX: the original attached a new FileHandler to the shared module
    logger (__name__) on every call, so repeated calls duplicated every log
    line and mixed different lognames into one logger.  Use a per-name
    logger and attach the handler only once.
    """
    logger = logging.getLogger(logname)
    logger.setLevel(level=logging.INFO)
    if not logger.handlers:
        handler = logging.FileHandler(logname + '.log')
        handler.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger
+
+
def progress2log(some_days_ago):
    """Poll redis for per-index transfer progress and write it to a log file.

    Runs until all 24 hourly indices of the chosen day report completion
    (sentinel value "1" written by Es2Mongo.Log2Redis), polling every 30s.

    BUG FIXES vs. original:
      * mixed tab/space indentation (a syntax error under Python 3);
      * redis-py returns bytes, so the comparison `value == 1` never
        matched and the loop never terminated;
      * the completion counter accumulated across sweeps (counting the same
        index once per sweep) instead of counting per sweep.
    """
    indices = index_list(some_days_ago)
    logname = '-'.join(indices[0].split('.')[:-1])
    logger = logsetting(logname)
    r = redis.StrictRedis(host=redis_ip, port=redis_port, db=0)
    done = 0
    while done != 24:
        done = 0
        for index in indices:
            key, field = Es2Mongo(index, 2000).FormatDBName()
            value = r.hget(key, field)
            # hget returns bytes, or None before the first progress report.
            if value is not None:
                value = value.decode('utf-8')
            if value == "1":
                logger.info("%s %s" % (index, "Data Processing Complete."))
                done += 1
            else:
                logger.info("%s %s" % (index, value))
        time.sleep(30)
    return 1
+
+
def run(some_days_ago):
    """Spawn one transfer process per hourly index plus a progress reporter.

    BUG FIX: the original used a Python-2 print statement, which is a syntax
    error under Python 3 (the sibling module esdata2mongo.py is py3-style).
    """
    plist = []
    indices = index_list(some_days_ago)
    for index in indices:
        p = Es2Mongo(index, 2000)
        process = multiprocessing.Process(target=p.es2mongo, args=(1,))
        plist.append(process)
    # One extra process tails redis and logs overall progress.
    p1 = multiprocessing.Process(target=progress2log, args=(some_days_ago,))
    plist.append(p1)
    for p in plist:
        p.start()
    print("all job are running...")
+
+
if __name__ == "__main__":
    # BUG FIX: Python-2 print statement replaced with the print() function
    # so the script parses under Python 3 as well.
    if len(sys.argv) < 2:
        print("you need to specify parameters.")
        sys.exit(1)
    some_days_ago = int(sys.argv[1])
    run(some_days_ago)

+ 138 - 0
pyproject/zabbixAPItools/jvmthread_graph_creater.py

@@ -0,0 +1,138 @@
+#! /usr/bin/env python
+# coding: utf-8
+import requests
+import json
+from pprint import pprint
+
+
+
class zabbixApi(object):
    """Zabbix JSON-RPC helper that maintains a "jvm threads" graph per host.

    For every host exposing Java per-pid thread items, the graph is created,
    or updated in place when it already exists.

    NOTE(review): API credentials are hard-coded below; move them to config.
    """

    def __init__(self):
        self.url = 'http://127.0.0.1:9091/zabbix/api_jsonrpc.php'
        self.headers = {'Content-Type': 'application/json'}
        auth = {
            "jsonrpc": "2.0",
            "method": "user.login",
            "params": {
                "user": "duh",
                "password": "EPy++z@BUT3X"
            },
            "id": 1,
            "auth": None,
        }
        response = requests.post(self.url, data=json.dumps(auth), headers=self.headers)
        # Session token used to authenticate every subsequent API call.
        self.authid = json.loads(response.text)['result']
        # Line colours assigned to graph items, in order.
        self.colors = ["1A7C11", "F63100", "2774A4", "A54F10", "FC6EA3", "6C59DC", "AC8C14", "611F27", "F230E0", "5CCD18", "BB2A02", "5A2B57", "89ABF8", "274482", "2B5429", "2774A4", "AC8C14", "611F27", "F230E0", "5CCD18", "BB2A02", "5A2B57", "89ABF8", "274482", "8048B4", "2B5429", "FD5434", "790E1F", "87AC4D", "000088", "000088"]

    def from_itemid_to_value(self, itemid):
        # NOTE(review): dead stub -- it builds the SQL text but never executes
        # it and implicitly returns None; kept only for interface
        # compatibility (zabbix_add_screen.py has the working version).
        sql = "select value_avg from trends_uint where itemid=%s  order by clock desc limit 1" % itemid

    def get_hosts(self):
        """Return the hostids of every monitored host."""
        payload = {
            "jsonrpc": "2.0",
            "method": "host.get",
            "params": {
                "output": [
                    "hostid",
                    "host"
                ],
                "selectInterfaces": [
                    "interfaceid",
                    "ip"
                ]
            },
            "id": 2,
            "auth": self.authid
        }
        response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
        return [host['hostid'] for host in response.json()['result']]

    def get_jvmthread_host_item(self):
        """Map hostid -> itemids of its Java per-pid thread items.

        Hosts without any matching item are omitted from the result.
        """
        items_by_host = {}
        for hostid in self.get_hosts():
            payload = {
                "jsonrpc": "2.0",
                "method": "item.get",
                "params": {
                    "output": "extend",
                    "hostids": hostid,
                    "sortfield": "name"
                },
                "auth": self.authid,
                "id": 1
            }
            response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
            # Matching item names contain "Java pid  " (note the double space).
            itemids = [item['itemid'] for item in response.json()['result']
                       if "Java pid  " in item['name']]
            if itemids:
                items_by_host[hostid] = itemids
        return items_by_host

    def get_graph_by_name(self, hostid):
        """Return the graphid of the host's "jvm threads" graph, or None."""
        payload = {
            "jsonrpc": "2.0",
            "method": "graph.get",
            "params": {
                "output": "extend",
                "hostids": hostid,
                "sortfield": "name"
            },
            "auth": self.authid,
            "id": 1
        }
        response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
        for graph in response.json()['result']:
            if graph['name'] == "jvm threads":
                return graph["graphid"]
        return None

    def graph_define(self, action, hostid, itemids, *graphid):
        """Create or update the graph; *action* is graph.create/graph.update."""
        gitems = [{"itemid": itemid, "color": color}
                  for itemid, color in zip(itemids, self.colors)]
        payload = {
            "jsonrpc": "2.0",
            "method": action,
            "params": {
                "name": "jvm threads",
                "width": 900,
                "height": 200,
                "hostids": hostid,
                "gitems": gitems
            },
            "auth": self.authid,
            "id": 1
        }
        if graphid:
            # graph.update needs the id of the existing graph.
            payload["params"]["graphid"] = graphid[0]
        response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
        result = response.json()
        # BUG FIX: Python-2 print statement replaced with the print()
        # function so the module parses under Python 3.
        print(result)

    def run(self):
        """Sync the "jvm threads" graph on every host that has JVM items."""
        host_item = self.get_jvmthread_host_item()
        for host, items in host_item.items():
            graphid = self.get_graph_by_name(host)
            if not graphid:
                self.graph_define("graph.create", host, items)
            else:
                self.graph_define("graph.update", host, items, graphid)
+         
+        
if __name__ == "__main__":
    # Entry point: (re)build the jvm-threads graph on every matching host.
    zabbixApi().run()
+

+ 259 - 0
pyproject/zabbixAPItools/zabbix_add_screen.py

@@ -0,0 +1,259 @@
+#! /usr/bin/env python
+# coding: utf-8
+import requests
+import json
+import pymysql
+from pprint import pprint
+
+
+
class zabbixApi(object):
    """Zabbix JSON-RPC helper that builds screens from the busiest hosts.

    Hosts are ranked by TCP connection count, CPU usage or network traffic,
    their matching graphs are collected, and a screen is created laying the
    graphs out in a 4-column grid.

    NOTE(review): API and MySQL credentials are hard-coded below; move them
    to configuration.
    """

    def __init__(self):
        self.url = 'http://127.0.0.1:9091/zabbix/api_jsonrpc.php'
        self.headers = {'Content-Type': 'application/json'}
        auth = {
            "jsonrpc": "2.0",
            "method": "user.login",
            "params": {
                "user": "user",
                "password": "password"
            },
            "id": 1,
            "auth": None,
        }
        response = requests.post(self.url, data=json.dumps(auth), headers=self.headers)
        # Session token used to authenticate every subsequent API call.
        self.authid = json.loads(response.text)['result']

        # Direct DB access reads trend data not fetched through the API.
        conn = pymysql.connect(
            host="127.0.0.1",
            user="zabbix",
            password="openet123",
            database="zabbix",
            charset="utf8")
        self.cursor = conn.cursor()

    def from_itemid_to_value(self, itemid):
        """Return the most recent value_avg trend for *itemid* (0 if none)."""
        # itemid comes from the zabbix DB/API, not from user input.
        sql = "select value_avg from trends_uint where itemid=%s  order by clock desc limit 1" % itemid
        self.cursor.execute(sql)
        data = self.cursor.fetchall()
        try:
            value = data[0][0]
        # BUG FIX: narrowed the bare except; only "no rows" is expected here.
        except IndexError:
            value = 0
        return value

    def from_graphid_to_itemid(self, graphid):
        """Return the larger trend value of the two items drawn on *graphid*.

        Assumes the graph has exactly two items (in/out traffic) -- TODO
        confirm; a different item count raises ValueError on unpacking.
        """
        sql = "select itemid  from graphs_items where graphid= %s" % graphid
        self.cursor.execute(sql)
        data = self.cursor.fetchall()
        _in, _out = data
        _in_v = self.from_itemid_to_value(_in[0])
        _out_v = self.from_itemid_to_value(_out[0])
        # Keep the larger of the two in _out_v and return it.
        if _in_v > _out_v:
            _out_v, _in_v = _in_v, _out_v
        return _out_v

    def get_hosts(self):
        """Return the hostids of every monitored host."""
        payload = {
            "jsonrpc": "2.0",
            "method": "host.get",
            "params": {
                "output": [
                    "hostid",
                    "host"
                ],
                "selectInterfaces": [
                    "interfaceid",
                    "ip"
                ]
            },
            "id": 2,
            "auth": self.authid
        }
        response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
        return [host['hostid'] for host in response.json()['result']]

    def _lastvalue_by_host(self, item_key, cast):
        """Shared helper: hostid -> lastvalue of the item matching *item_key*,
        converted by *cast*, sorted by value descending."""
        filter_result = {}
        for hostid in self.get_hosts():
            payload = {
                "jsonrpc": "2.0",
                "method": "item.get",
                "params": {
                    "output": "extend",
                    "hostids": hostid,
                    "search": {
                        "key_": item_key
                    },
                    "sortfield": "name"
                },
                "auth": self.authid,
                "id": 1
            }
            response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
            for item in response.json()['result']:
                filter_result[hostid] = cast(item.get("lastvalue"))
        return sorted(filter_result.items(), key=lambda item: item[1], reverse=True)

    def get_conn_value(self):
        """Hosts ranked by total TCP connections (item key 'conn.all')."""
        return self._lastvalue_by_host("conn.all", int)

    def get_cpu_value(self):
        """Hosts ranked by user-CPU utilisation."""
        return self._lastvalue_by_host("system.cpu.util[,user]", float)

    def get_traffic_value(self):
        """Hosts ranked by the busiest 'network traffic on' item lastvalue."""
        filter_result = {}
        for hostid in self.get_hosts():
            payload = {
                "jsonrpc": "2.0",
                "method": "item.get",
                "params": {
                    "output": "extend",
                    "hostids": hostid,
                    "sortfield": "name"
                },
                "auth": self.authid,
                "id": 1
            }
            response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
            # Keep the maximum lastvalue across all traffic items of the host.
            last_value = 0
            for item in response.json()['result']:
                if "network traffic on" in item['name']:
                    if int(item.get("lastvalue")) > last_value:
                        last_value = int(item.get("lastvalue"))
            if last_value > 0:
                filter_result[hostid] = last_value
        return sorted(filter_result.items(), key=lambda item: item[1], reverse=True)

    def get_graphid(self, graph_name):
        """Return graphids whose name contains *graph_name*, ordered by load.

        For traffic graphs only the busiest graph per host is kept, ranked
        by its trend value; other graph types keep host-ranking order.
        """
        if graph_name == "traffic":
            hostid_by_filter = self.get_traffic_value()
        if graph_name == "tcp_connection":
            hostid_by_filter = self.get_conn_value()
        if graph_name == "CPU utilization":
            hostid_by_filter = self.get_cpu_value()
        host_ids = [host[0] for host in hostid_by_filter]
        graph_ids = []
        best = {}
        for hostid in host_ids:
            payload = {
                "jsonrpc": "2.0",
                "method": "graph.get",
                "params": {
                    "output": "extend",
                    "hostids": hostid,
                    "search": {
                        "key_": "tcp_connection"  # doesn't work for graph.get; kept for reference
                    },
                    "sortfield": "name"
                },
                "auth": self.authid,
                "id": 1
            }
            response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
            for graph in response.json()['result']:
                if graph_name in graph['name']:
                    if "traffic" in graph_name:
                        v = self.from_graphid_to_itemid(graph['graphid'])
                        # BUG FIX: the original tested the literal string
                        # 'hostid' (d.get('hostid')) instead of the hostid
                        # variable, so every graph overwrote the host's
                        # entry and the "busiest graph" selection was broken.
                        if hostid not in best or v > best[hostid][0]:
                            best[hostid] = [v, graph['graphid']]
                    else:
                        graph_ids.append(graph['graphid'])
        if best:
            sorted_list = sorted(best.items(), key=lambda item: item[1][0], reverse=True)
            for _, pair in sorted_list:
                graph_ids.append(pair[1])
        return graph_ids

    def create_screen(self, graph_name, screen_name):
        """Create screen *screen_name* holding every graph matching
        *graph_name*, laid out 4 per row and capped at 40 cells."""
        screenitems = []
        x, y = 0, 0
        for resourceid in self.get_graphid(graph_name):
            screenitems.append({
                "resourcetype": 0,
                "resourceid": resourceid,
                "rowspan": 1,
                "colspan": 1,
                "x": x,
                "y": y
            })
            # Advance the 4-column cursor (columns 0..3).
            if x < 3:
                x += 1
            else:
                x = 0
                y += 1
        if len(screenitems) >= 40:
            vsize = 40
            screenitems = screenitems[:40]
        else:
            vsize = len(screenitems)
        payload = {
            "jsonrpc": "2.0",
            "method": "screen.create",
            "params": {
                "name": screen_name,
                "hsize": 4,
                "vsize": vsize,
                "screenitems": screenitems
            },
            "auth": self.authid,
            "id": 1
        }
        response = requests.post(self.url, data=json.dumps(payload), headers=self.headers)
        print(response.text)
        print("finished.")
+
if __name__ == '__main__':
    # Other screen types, kept as ready-made examples:
#    zabbixApi().create_screen("tcp_connection", "TCP_Connections")
#    zabbixApi().create_screen("traffic", "Network_Traffic")
    zabbixApi().create_screen("CPU utilization", "CPU Usage")