1 回答
TA贡献1815条经验 获得超10个赞
我编写了一个可选支持文件读写的解决方案,默认不使用文件。若您的情况需要文件支持,请将脚本中的 use_files = False
改为 use_files = True
即可。
我预计您想要删除具有相同(键,值)对的重复项。
import json

# Toggle: True reads ./myfile.json and writes ./cleanedRedundancy.json;
# False processes the inline sample below and prints to stdout.
use_files = False

# Deduplication applies only to these keys; every other key is left alone.
only_keys = {'address', 'complex'}

if use_files:
    with open("./myfile.json", 'r', encoding='utf-8') as fp:
        raw = fp.read()
else:
    raw = """
[
{
"name": "A",
"address": "some address related to A",
"details": "some details related to A"
},
{
"name": "B",
"address": "some address related to A",
"details": "some details related to B",
"complex": ["x", {"y": "z", "p": "q"}],
"dont_remove": "test"
},
{
"name": "C",
"address": "some address related to A",
"details": "some details related to C",
"complex": ["x", {"p": "q", "y": "z"}],
"dont_remove": "test"
}
]
"""

entries = json.loads(raw)

# Every (key, canonical-json-of-value) pair seen so far. Serializing with
# sort_keys=True makes dicts with the same items but different key order
# compare equal, so they count as duplicates.
seen = set()
for entry in entries:
    # Snapshot the keys so deleting from `entry` is safe mid-iteration.
    for key in list(entry):
        if key in only_keys:
            canonical = json.dumps(entry[key], sort_keys=True)
            if (key, canonical) in seen:
                # A later occurrence of an already-seen pair: drop it.
                del entry[key]
            else:
                seen.add((key, canonical))

rendered = json.dumps(entries, indent=4, ensure_ascii=False)
if use_files:
    with open("./cleanedRedundancy.json", "w", encoding='utf-8') as f:
        f.write(rendered)
else:
    print(rendered)
输出:
[
{
"name": "A",
"address": "some address related to A",
"details": "some details related to A"
},
{
"name": "B",
"details": "some details related to B",
"complex": [
"x",
{
"y": "z",
"p": "q"
}
],
"dont_remove": "test"
},
{
"name": "C",
"details": "some details related to C",
"dont_remove": "test"
}
]
添加回答
举报