
Python - Parse (fio) JSON output

I have JSON output from the Linux fio command, shown below, which I need to parse into a dictionary so I can pull certain values out of certain keys. But the nested layers of this JSON output end up lumped together as huge "values" in the key/value pairs. Any pointers on parsing these nested data structures more cleanly?

{ 
    "disk_util": [ 
    { 
     "aggr_util": 96.278308, 
     "in_queue": 247376, 
     "write_ticks": 185440, 
     "read_ticks": 61924, 
     "write_merges": 0, 
     "read_merges": 0, 
     "write_ios": 240866, 
     "read_ios": 18257, 
     "name": "dm-0", 
     "util": 97.257058, 
     "aggr_read_ios": 18465, 
     "aggr_write_ios": 243642, 
     "aggr_read_merges": 1, 
     "aggr_write_merge": 72, 
     "aggr_read_ticks": 62420, 
     "aggr_write_ticks": 185796, 
     "aggr_in_queue": 245504 
    }, 
    { 
     "util": 96.278308, 
     "name": "sda", 
     "read_ios": 18465, 
     "write_ios": 243642, 
     "read_merges": 1, 
     "write_merges": 72, 
     "read_ticks": 62420, 
     "write_ticks": 185796, 
     "in_queue": 245504 
    } 
    ], 
    "jobs": [ 
    { 
     "latency_window": 0, 
     "latency_percentile": 100, 
     "latency_target": 0, 
     "latency_depth": 64, 
     "latency_ms": { 
     ">=2000": 0, 
     "2000": 0, 
     "1000": 0, 
     "750": 0, 
     "2": 0, 
     "4": 0, 
     "10": 0, 
     "20": 0, 
     "50": 0, 
     "100": 0, 
     "250": 0, 
     "500": 0 
     }, 
     "latency_us": { 
     "1000": 0, 
     "750": 0, 
     "2": 0, 
     "4": 0, 
     "10": 0, 
     "20": 0, 
     "50": 0, 
     "100": 0, 
     "250": 0, 
     "500": 0 
     }, 
     "write": { 
     "iops_samples": 35, 
     "iops_stddev": 1608.115728, 
     "iops_mean": 13835.571429, 
     "iops_max": 16612, 
     "iops_min": 9754, 
     "bw_samples": 35, 
     "drop_ios": 0, 
     "short_ios": 0, 
     "total_ios": 243678, 
     "runtime": 17611, 
     "iops": 13836.692976, 
     "bw": 55346, 
     "io_kbytes": 974712, 
     "io_bytes": 998105088, 
     "slat_ns": { 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "clat_ns": { 
      "percentile": { 
      "0.00": 0 
      }, 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "lat_ns": { 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "bw_min": 39016, 
     "bw_max": 66448, 
     "bw_agg": 99.994218, 
     "bw_mean": 55342.8, 
     "bw_dev": 6432.427333 
     }, 
     "read": { 
     "iops_samples": 35, 
     "iops_stddev": 126.732776, 
     "iops_mean": 1048.257143, 
     "iops_max": 1336, 
     "iops_min": 772, 
     "bw_samples": 35, 
     "drop_ios": 0, 
     "short_ios": 0, 
     "total_ios": 18466, 
     "runtime": 17611, 
     "iops": 1048.549202, 
     "bw": 4194, 
     "io_kbytes": 73864, 
     "io_bytes": 75636736, 
     "slat_ns": { 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "clat_ns": { 
      "percentile": { 
      "0.00": 0 
      }, 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "lat_ns": { 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "bw_min": 3088, 
     "bw_max": 5344, 
     "bw_agg": 99.993188, 
     "bw_mean": 4193.714286, 
     "bw_dev": 506.844597 
     }, 
     "job options": { 
     "rwmixread": "7", 
     "rw": "randrw", 
     "size": "1G", 
     "iodepth": "64", 
     "bs": "4k", 
     "filename": "test", 
     "name": "test" 
     }, 
     "elapsed": 18, 
     "eta": 0, 
     "error": 0, 
     "groupid": 0, 
     "jobname": "test", 
     "trim": { 
     "iops_samples": 0, 
     "iops_stddev": 0, 
     "iops_mean": 0, 
     "iops_max": 0, 
     "iops_min": 0, 
     "bw_samples": 0, 
     "drop_ios": 0, 
     "short_ios": 0, 
     "total_ios": 0, 
     "runtime": 0, 
     "iops": 0, 
     "bw": 0, 
     "io_kbytes": 0, 
     "io_bytes": 0, 
     "slat_ns": { 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "clat_ns": { 
      "percentile": { 
      "0.00": 0 
      }, 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "lat_ns": { 
      "stddev": 0, 
      "mean": 0, 
      "max": 0, 
      "min": 0 
     }, 
     "bw_min": 0, 
     "bw_max": 0, 
     "bw_agg": 0, 
     "bw_mean": 0, 
     "bw_dev": 0 
     }, 
     "usr_cpu": 11.447391, 
     "sys_cpu": 74.680597, 
     "ctx": 28972, 
     "majf": 0, 
     "minf": 31, 
     "iodepth_level": { 
     ">=64": 99.975967, 
     "32": 0.1, 
     "16": 0.1, 
     "8": 0.1, 
     "4": 0.1, 
     "2": 0.1, 
     "1": 0.1 
     }, 
     "latency_ns": { 
     "1000": 0, 
     "750": 0, 
     "2": 0, 
     "4": 0, 
     "10": 0, 
     "20": 0, 
     "50": 0, 
     "100": 0, 
     "250": 0, 
     "500": 0 
     } 
    } 
    ], 
    "global options": { 
    "gtod_reduce": "1", 
    "direct": "1", 
    "ioengine": "libaio", 
    "randrepeat": "1" 
    }, 
    "time": "Sat Oct 14 23:18:28 2017", 
    "timestamp_ms": 1508023108010, 
    "timestamp": 1508023108, 
    "fio version": "fio-3.1" 
} 

I load it from the file in a really simplistic way:

import json 

my_file = open('fio.json', 'r') 
my_dict = json.load(my_file) 

for k, v in my_dict.items(): 
    print("Key: {0}, value: {1}").format(k, v) 

But iterating over it like this returns all of the nested lists and dicts as munged output, such as:

Key: disk_util, value: [{u'aggr_write_ticks': 185796, u'write_merges': 0, u'write_ticks': 185440, u'write_ios': 240866, u'aggr_write_ios': 243642, u'aggr_read_ticks': 62420, u'read_ios': 18257, u'util': 97.257058, u'read_ticks': 61924, u'aggr_write_merge': 72, u'read_merges': 0, u'aggr_in_queue': 245504, u'aggr_read_ios': 18465, u'aggr_util': 96.278308, u'aggr_read_merges': 1, u'in_queue': 247376, u'name': u'dm-0'}, {u'read_merges': 1, u'name': u'sda', u'write_ios': 243642, u'read_ios': 18465, u'util': 96.278308, u'read_ticks': 62420, u'write_merges': 72, u'in_queue': 245504, u'write_ticks': 185796}] 

Well, that's just how nested dictionaries work. –


It looks like the JSON is being parsed correctly - if the output is your problem, try the 'pprint' module. Or, if you want to write your own output routine, you will need something recursive to handle the nesting/indentation as you go. – PaulMcG
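
A minimal sketch of the two approaches this comment suggests, assuming the fio.json file from the question; the walk() helper is a hypothetical name used here for illustration, not part of any library:

import json 
from pprint import pprint 

with open('fio.json', 'r') as f: 
    data = json.load(f) 

# Option 1: pprint handles nested dicts/lists and indentation for you. 
pprint(data, width=100) 

# Option 2: a small recursive printer that indents one level per nesting depth. 
def walk(node, indent=0): 
    pad = ' ' * indent 
    if isinstance(node, dict): 
        for key, value in node.items(): 
            print("{0}{1}:".format(pad, key)) 
            walk(value, indent + 2) 
    elif isinstance(node, list): 
        for item in node: 
            walk(item, indent + 2) 
    else: 
        print("{0}{1}".format(pad, node)) 

walk(data) 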

Answer


json.load() preserves the types from the JSON file, so you already get nested dicts and lists back.

You appear to have a syntax error: the closing parenthesis is in the wrong place, so .format() is called on the result of print() instead of on the format string.

import json 


my_file = open('fio.json', 'r') 
my_dict = json.load(my_file) 

# The fix: the closing parenthesis now sits after .format(), so the 
# formatted string is passed to print() rather than .format() being 
# called on print()'s return value. 
for index, key in enumerate(my_dict): 
    print("Key: {0}, value: {1}".format(key, my_dict[key]))