# Copyright (c) Facebook, Inc. and its affiliates.
import copy
import itertools
import logging
import numpy as np
import pickle
import random
import torch.utils.data as data
from torch.utils.data.sampler import Sampler
from detectron2.utils.serialize import PicklableWrapper
__all__ = ["MapDataset", "DatasetFromList", "AspectRatioGroupedDataset", "ToIterableDataset"]
class MapDataset(data.Dataset):
"""
Map a function over the elements in a dataset.
Args:
dataset: a dataset where map function is applied.
        map_func: a callable which maps an element of the dataset. map_func
            is responsible for error handling: when an error happens, it
            should return None, and MapDataset will then randomly use other
            elements from the dataset.
"""
def __init__(self, dataset, map_func):
self._dataset = dataset
self._map_func = PicklableWrapper(map_func) # wrap so that a lambda will work
self._rng = random.Random(42)
self._fallback_candidates = set(range(len(dataset)))
def __len__(self):
return len(self._dataset)
def __getitem__(self, idx):
retry_count = 0
cur_idx = int(idx)
while True:
data = self._map_func(self._dataset[cur_idx])
if data is not None:
self._fallback_candidates.add(cur_idx)
return data
# _map_func fails for this idx, use a random new index from the pool
retry_count += 1
self._fallback_candidates.discard(cur_idx)
            # random.sample needs a sequence; sets are rejected on Python 3.11+
            cur_idx = self._rng.sample(list(self._fallback_candidates), k=1)[0]
if retry_count >= 3:
logger = logging.getLogger(__name__)
logger.warning(
"Failed to apply `_map_func` for idx: {}, retry count: {}".format(
idx, retry_count
)
)
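# --- Illustrative sketch (added for exposition; not part of the original file) ---
# A minimal demonstration of MapDataset's fallback behavior, assuming a mapper
# that returns None for items it cannot process. `_flaky_mapper` and
# `_demo_map_dataset` are hypothetical names used only for this sketch.
def _demo_map_dataset():
    def _flaky_mapper(item):
        return None if item.get("corrupted") else item

    ds = MapDataset(
        DatasetFromList([{"x": 1}, {"corrupted": True}], serialize=False), _flaky_mapper
    )
    # Index 1 fails (the mapper returns None), so MapDataset silently retries
    # with a random index drawn from the remaining healthy candidates.
    return ds[1]  # -> {"x": 1}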
class DatasetFromList(data.Dataset):
"""
    Wrap a list into a torch Dataset. It produces elements of the list as data.
"""
def __init__(self, lst: list, copy: bool = True, serialize: bool = True):
"""
Args:
lst (list): a list which contains elements to produce.
copy (bool): whether to deepcopy the element when producing it,
so that the result can be modified in place without affecting the
source in the list.
            serialize (bool): whether to hold memory using serialized objects. When
                enabled, data loader workers can use shared RAM from the master
                process instead of making a copy.
"""
self._lst = lst
self._copy = copy
self._serialize = serialize
def _serialize(data):
buffer = pickle.dumps(data, protocol=-1)
return np.frombuffer(buffer, dtype=np.uint8)
if self._serialize:
logger = logging.getLogger(__name__)
logger.info(
"Serializing {} elements to byte tensors and concatenating them all ...".format(
len(self._lst)
)
)
self._lst = [_serialize(x) for x in self._lst]
self._addr = np.asarray([len(x) for x in self._lst], dtype=np.int64)
self._addr = np.cumsum(self._addr)
self._lst = np.concatenate(self._lst)
logger.info("Serialized dataset takes {:.2f} MiB".format(len(self._lst) / 1024 ** 2))
def __len__(self):
if self._serialize:
return len(self._addr)
else:
return len(self._lst)
def __getitem__(self, idx):
if self._serialize:
start_addr = 0 if idx == 0 else self._addr[idx - 1].item()
end_addr = self._addr[idx].item()
            data_bytes = memoryview(self._lst[start_addr:end_addr])
            return pickle.loads(data_bytes)
elif self._copy:
return copy.deepcopy(self._lst[idx])
else:
return self._lst[idx]
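# --- Illustrative sketch (added for exposition; not part of the original file) ---
# The serialized storage above concatenates pickled elements into one uint8
# buffer and keeps cumulative end addresses, so element i lives at
# buffer[addr[i-1]:addr[i]]. A standalone re-creation of that lookup:
def _demo_serialized_lookup(idx=1):
    items = [{"a": 1}, {"b": [2, 3]}]
    chunks = [np.frombuffer(pickle.dumps(x, protocol=-1), dtype=np.uint8) for x in items]
    addr = np.cumsum([len(c) for c in chunks])
    buf = np.concatenate(chunks)
    start = 0 if idx == 0 else addr[idx - 1].item()
    return pickle.loads(memoryview(buf[start : addr[idx].item()]))  # -> items[idx]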
class ToIterableDataset(data.IterableDataset):
"""
Convert an old indices-based (also called map-style) dataset
to an iterable-style dataset.
"""
def __init__(self, dataset, sampler):
"""
Args:
dataset (torch.utils.data.Dataset): an old-style dataset with ``__getitem__``
sampler (torch.utils.data.sampler.Sampler): a cheap iterable that produces indices
to be applied on ``dataset``.
"""
assert not isinstance(dataset, data.IterableDataset), dataset
assert isinstance(sampler, Sampler), sampler
self.dataset = dataset
self.sampler = sampler
def __iter__(self):
worker_info = data.get_worker_info()
if worker_info is None or worker_info.num_workers == 1:
for idx in self.sampler:
yield self.dataset[idx]
else:
            # With a map-style dataset, `DataLoader(dataset, sampler)` runs the
            # sampler in the main process only. But `DataLoader(ToIterableDataset(dataset, sampler))`
            # will run the sampler in every one of the N workers, and each worker keeps
            # only 1/N of the ids. The assumption is that the sampler is cheap to
            # iterate, so it is fine to discard ids in workers.
for idx in itertools.islice(
self.sampler, worker_info.id, None, worker_info.num_workers
):
yield self.dataset[idx]
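# --- Illustrative sketch (added for exposition; not part of the original file) ---
# How the sharding above partitions the sampler stream: worker w out of N keeps
# the indices at positions w, w+N, w+2N, ... `_demo_worker_sharding` is a
# hypothetical helper for this sketch only.
def _demo_worker_sharding(num_workers=2):
    sampler_stream = range(6)  # stands in for a cheap-to-iterate Sampler
    return [
        list(itertools.islice(sampler_stream, worker_id, None, num_workers))
        for worker_id in range(num_workers)
    ]  # -> [[0, 2, 4], [1, 3, 5]]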
class AspectRatioGroupedDataset(data.IterableDataset):
"""
Batch data that have similar aspect ratio together.
In this implementation, images whose aspect ratio < (or >) 1 will
be batched together.
This improves training speed because the images then need less padding
to form a batch.
It assumes the underlying dataset produces dicts with "width" and "height" keys.
It will then produce a list of original dicts with length = batch_size,
all with similar aspect ratios.
"""
def __init__(self, dataset, batch_size):
"""
Args:
dataset: an iterable. Each element must be a dict with keys
"width" and "height", which will be used to batch data.
batch_size (int):
"""
self.dataset = dataset
self.batch_size = batch_size
self._buckets = [[] for _ in range(2)]
# Hard-coded two aspect ratio groups: w > h and w < h.
# Can add support for more aspect ratio groups, but doesn't seem useful
def __iter__(self):
for d in self.dataset:
w, h = d["width"], d["height"]
bucket_id = 0 if w > h else 1
bucket = self._buckets[bucket_id]
bucket.append(d)
if len(bucket) == self.batch_size:
yield bucket[:]
del bucket[:]
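# --- Illustrative sketch (added for exposition; not part of the original file) ---
# Feeding dicts with "width"/"height" keys yields batches whose members all share
# the same orientation; here the two landscape images form the only full batch,
# and the lone portrait image stays in its (never-full) bucket.
def _demo_aspect_ratio_grouping():
    data = [
        {"width": 200, "height": 100},  # landscape -> bucket 0
        {"width": 100, "height": 200},  # portrait  -> bucket 1
        {"width": 300, "height": 150},  # landscape -> bucket 0
    ]
    return list(AspectRatioGroupedDataset(iter(data), batch_size=2))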
# === End of banmo-main:third_party/detectron2_old/detectron2/data/common.py ===
# Copyright (c) Facebook, Inc. and its affiliates.
import contextlib
import datetime
import io
import json
import logging
import numpy as np
import os
import shutil
import pycocotools.mask as mask_util
from fvcore.common.timer import Timer
from iopath.common.file_io import file_lock
from PIL import Image
from detectron2.structures import Boxes, BoxMode, PolygonMasks, RotatedBoxes
from detectron2.utils.file_io import PathManager
from .. import DatasetCatalog, MetadataCatalog
"""
This file contains functions to parse COCO-format annotations into dicts in "Detectron2 format".
"""
logger = logging.getLogger(__name__)
__all__ = ["load_coco_json", "load_sem_seg", "convert_to_coco_json", "register_coco_instances"]
def load_coco_json(json_file, image_root, dataset_name=None, extra_annotation_keys=None):
"""
Load a json file with COCO's instances annotation format.
Currently supports instance detection, instance segmentation,
and person keypoints annotations.
Args:
json_file (str): full path to the json file in COCO instances annotation format.
        image_root (str or path-like): the directory where the images in this json file exist.
dataset_name (str or None): the name of the dataset (e.g., coco_2017_train).
When provided, this function will also do the following:
* Put "thing_classes" into the metadata associated with this dataset.
* Map the category ids into a contiguous range (needed by standard dataset format),
and add "thing_dataset_id_to_contiguous_id" to the metadata associated
with this dataset.
This option should usually be provided, unless users need to load
the original json content and apply more processing manually.
extra_annotation_keys (list[str]): list of per-annotation keys that should also be
loaded into the dataset dict (besides "iscrowd", "bbox", "keypoints",
"category_id", "segmentation"). The values for these keys will be returned as-is.
For example, the densepose annotations are loaded in this way.
Returns:
list[dict]: a list of dicts in Detectron2 standard dataset dicts format (See
`Using Custom Datasets </tutorials/datasets.html>`_ ) when `dataset_name` is not None.
        If `dataset_name` is None, the returned `category_ids` may be
        non-contiguous and may not conform to the Detectron2 standard format.
Notes:
1. This function does not read the image files.
The results do not have the "image" field.
"""
from pycocotools.coco import COCO
timer = Timer()
json_file = PathManager.get_local_path(json_file)
with contextlib.redirect_stdout(io.StringIO()):
coco_api = COCO(json_file)
if timer.seconds() > 1:
logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds()))
id_map = None
if dataset_name is not None:
meta = MetadataCatalog.get(dataset_name)
cat_ids = sorted(coco_api.getCatIds())
cats = coco_api.loadCats(cat_ids)
# The categories in a custom json file may not be sorted.
thing_classes = [c["name"] for c in sorted(cats, key=lambda x: x["id"])]
meta.thing_classes = thing_classes
# In COCO, certain category ids are artificially removed,
# and by convention they are always ignored.
# We deal with COCO's id issue and translate
# the category ids to contiguous ids in [0, 80).
        # It works by looking at the "categories" field in the json, therefore
        # if users' own json also has non-contiguous ids, we'll
        # apply this mapping as well but print a warning.
if not (min(cat_ids) == 1 and max(cat_ids) == len(cat_ids)):
if "coco" not in dataset_name:
logger.warning(
"""
Category ids in annotations are not in [1, #categories]! We'll apply a mapping for you.
"""
)
id_map = {v: i for i, v in enumerate(cat_ids)}
meta.thing_dataset_id_to_contiguous_id = id_map
# sort indices for reproducible results
img_ids = sorted(coco_api.imgs.keys())
# imgs is a list of dicts, each looks something like:
# {'license': 4,
# 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',
# 'file_name': 'COCO_val2014_000000001268.jpg',
# 'height': 427,
# 'width': 640,
# 'date_captured': '2013-11-17 05:57:24',
# 'id': 1268}
imgs = coco_api.loadImgs(img_ids)
# anns is a list[list[dict]], where each dict is an annotation
# record for an object. The inner list enumerates the objects in an image
# and the outer list enumerates over images. Example of anns[0]:
# [{'segmentation': [[192.81,
# 247.09,
# ...
# 219.03,
# 249.06]],
# 'area': 1035.749,
# 'iscrowd': 0,
# 'image_id': 1268,
# 'bbox': [192.81, 224.8, 74.73, 33.43],
# 'category_id': 16,
# 'id': 42986},
# ...]
anns = [coco_api.imgToAnns[img_id] for img_id in img_ids]
total_num_valid_anns = sum([len(x) for x in anns])
total_num_anns = len(coco_api.anns)
if total_num_valid_anns < total_num_anns:
logger.warning(
f"{json_file} contains {total_num_anns} annotations, but only "
f"{total_num_valid_anns} of them match to images in the file."
)
if "minival" not in json_file:
# The popular valminusminival & minival annotations for COCO2014 contain this bug.
# However the ratio of buggy annotations there is tiny and does not affect accuracy.
# Therefore we explicitly white-list them.
ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image]
assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique!".format(
json_file
)
imgs_anns = list(zip(imgs, anns))
logger.info("Loaded {} images in COCO format from {}".format(len(imgs_anns), json_file))
dataset_dicts = []
ann_keys = ["iscrowd", "bbox", "keypoints", "category_id"] + (extra_annotation_keys or [])
num_instances_without_valid_segmentation = 0
for (img_dict, anno_dict_list) in imgs_anns:
record = {}
record["file_name"] = os.path.join(image_root, img_dict["file_name"])
record["height"] = img_dict["height"]
record["width"] = img_dict["width"]
image_id = record["image_id"] = img_dict["id"]
objs = []
for anno in anno_dict_list:
# Check that the image_id in this annotation is the same as
# the image_id we're looking at.
# This fails only when the data parsing logic or the annotation file is buggy.
# The original COCO valminusminival2014 & minival2014 annotation files
# actually contains bugs that, together with certain ways of using COCO API,
# can trigger this assertion.
assert anno["image_id"] == image_id
assert anno.get("ignore", 0) == 0, '"ignore" in COCO json file is not supported.'
obj = {key: anno[key] for key in ann_keys if key in anno}
if "bbox" in obj and len(obj["bbox"]) == 0:
raise ValueError(
f"One annotation of image {image_id} contains empty 'bbox' value! "
"This json does not have valid COCO format."
)
segm = anno.get("segmentation", None)
if segm: # either list[list[float]] or dict(RLE)
if isinstance(segm, dict):
if isinstance(segm["counts"], list):
# convert to compressed RLE
segm = mask_util.frPyObjects(segm, *segm["size"])
else:
# filter out invalid polygons (< 3 points)
segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]
if len(segm) == 0:
num_instances_without_valid_segmentation += 1
continue # ignore this instance
obj["segmentation"] = segm
keypts = anno.get("keypoints", None)
if keypts: # list[int]
for idx, v in enumerate(keypts):
if idx % 3 != 2:
# COCO's segmentation coordinates are floating points in [0, H or W],
# but keypoint coordinates are integers in [0, H-1 or W-1]
# Therefore we assume the coordinates are "pixel indices" and
# add 0.5 to convert to floating point coordinates.
keypts[idx] = v + 0.5
obj["keypoints"] = keypts
obj["bbox_mode"] = BoxMode.XYWH_ABS
if id_map:
annotation_category_id = obj["category_id"]
try:
obj["category_id"] = id_map[annotation_category_id]
except KeyError as e:
raise KeyError(
f"Encountered category_id={annotation_category_id} "
"but this id does not exist in 'categories' of the json file."
) from e
objs.append(obj)
record["annotations"] = objs
dataset_dicts.append(record)
if num_instances_without_valid_segmentation > 0:
logger.warning(
"Filtered out {} instances without valid segmentation. ".format(
num_instances_without_valid_segmentation
)
+ "There might be issues in your dataset generation process. Please "
"check https://detectron2.readthedocs.io/en/latest/tutorials/datasets.html carefully"
)
return dataset_dicts
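# --- Illustrative sketch (added for exposition; not part of the original file) ---
# Typical use of load_coco_json; the paths below are placeholders, not files
# shipped with this repository.
def _demo_load_coco_json():
    dicts = load_coco_json(
        "datasets/coco/annotations/instances_val2017.json",
        "datasets/coco/val2017",
        dataset_name="coco_2017_val",
    )
    # Each record carries "file_name", "height", "width", "image_id", "annotations".
    return dicts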
def load_sem_seg(gt_root, image_root, gt_ext="png", image_ext="jpg"):
"""
Load semantic segmentation datasets. All files under "gt_root" with "gt_ext" extension are
treated as ground truth annotations and all files under "image_root" with "image_ext" extension
as input images. Ground truth and input images are matched using file paths relative to
"gt_root" and "image_root" respectively without taking into account file extensions.
This works for COCO as well as some other datasets.
Args:
gt_root (str): full path to ground truth semantic segmentation files. Semantic segmentation
annotations are stored as images with integer values in pixels that represent
corresponding semantic labels.
image_root (str): the directory where the input images are.
gt_ext (str): file extension for ground truth annotations.
image_ext (str): file extension for input images.
Returns:
list[dict]:
a list of dicts in detectron2 standard format without instance-level
annotation.
Notes:
1. This function does not read the image and ground truth files.
The results do not have the "image" and "sem_seg" fields.
"""
# We match input images with ground truth based on their relative filepaths (without file
# extensions) starting from 'image_root' and 'gt_root' respectively.
def file2id(folder_path, file_path):
# extract relative path starting from `folder_path`
image_id = os.path.normpath(os.path.relpath(file_path, start=folder_path))
# remove file extension
image_id = os.path.splitext(image_id)[0]
return image_id
input_files = sorted(
(os.path.join(image_root, f) for f in PathManager.ls(image_root) if f.endswith(image_ext)),
key=lambda file_path: file2id(image_root, file_path),
)
gt_files = sorted(
(os.path.join(gt_root, f) for f in PathManager.ls(gt_root) if f.endswith(gt_ext)),
key=lambda file_path: file2id(gt_root, file_path),
)
assert len(gt_files) > 0, "No annotations found in {}.".format(gt_root)
# Use the intersection, so that val2017_100 annotations can run smoothly with val2017 images
if len(input_files) != len(gt_files):
        logger.warning(
"Directory {} and {} has {} and {} files, respectively.".format(
image_root, gt_root, len(input_files), len(gt_files)
)
)
input_basenames = [os.path.basename(f)[: -len(image_ext)] for f in input_files]
gt_basenames = [os.path.basename(f)[: -len(gt_ext)] for f in gt_files]
intersect = list(set(input_basenames) & set(gt_basenames))
# sort, otherwise each worker may obtain a list[dict] in different order
intersect = sorted(intersect)
logger.warn("Will use their intersection of {} files.".format(len(intersect)))
input_files = [os.path.join(image_root, f + image_ext) for f in intersect]
gt_files = [os.path.join(gt_root, f + gt_ext) for f in intersect]
logger.info(
"Loaded {} images with semantic segmentation from {}".format(len(input_files), image_root)
)
dataset_dicts = []
for (img_path, gt_path) in zip(input_files, gt_files):
record = {}
record["file_name"] = img_path
record["sem_seg_file_name"] = gt_path
dataset_dicts.append(record)
return dataset_dicts
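# --- Illustrative sketch (added for exposition; not part of the original file) ---
# The matching key used above is the path relative to the root with the file
# extension stripped, so "images/city/img_001.jpg" pairs with
# "gt/city/img_001.png":
def _demo_file2id():
    image_id = os.path.normpath(os.path.relpath("images/city/img_001.jpg", start="images"))
    return os.path.splitext(image_id)[0]  # -> "city/img_001" (path separator is OS-specific)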
def convert_to_coco_dict(dataset_name):
"""
Convert an instance detection/segmentation or keypoint detection dataset
in detectron2's standard format into COCO json format.
Generic dataset description can be found here:
https://detectron2.readthedocs.io/tutorials/datasets.html#register-a-dataset
COCO data format description can be found here:
http://cocodataset.org/#format-data
Args:
dataset_name (str):
name of the source dataset
            Must be registered in DatasetCatalog and in detectron2's standard format.
Must have corresponding metadata "thing_classes"
Returns:
coco_dict: serializable dict in COCO json format
"""
dataset_dicts = DatasetCatalog.get(dataset_name)
metadata = MetadataCatalog.get(dataset_name)
# unmap the category mapping ids for COCO
if hasattr(metadata, "thing_dataset_id_to_contiguous_id"):
reverse_id_mapping = {v: k for k, v in metadata.thing_dataset_id_to_contiguous_id.items()}
reverse_id_mapper = lambda contiguous_id: reverse_id_mapping[contiguous_id] # noqa
else:
reverse_id_mapper = lambda contiguous_id: contiguous_id # noqa
categories = [
{"id": reverse_id_mapper(id), "name": name}
for id, name in enumerate(metadata.thing_classes)
]
logger.info("Converting dataset dicts into COCO format")
coco_images = []
coco_annotations = []
for image_id, image_dict in enumerate(dataset_dicts):
coco_image = {
"id": image_dict.get("image_id", image_id),
"width": int(image_dict["width"]),
"height": int(image_dict["height"]),
"file_name": str(image_dict["file_name"]),
}
coco_images.append(coco_image)
anns_per_image = image_dict.get("annotations", [])
for annotation in anns_per_image:
# create a new dict with only COCO fields
coco_annotation = {}
            # COCO requirement: XYWH box format for axis-aligned and XYWHA for rotated
bbox = annotation["bbox"]
if isinstance(bbox, np.ndarray):
if bbox.ndim != 1:
raise ValueError(f"bbox has to be 1-dimensional. Got shape={bbox.shape}.")
bbox = bbox.tolist()
if len(bbox) not in [4, 5]:
raise ValueError(f"bbox has to has length 4 or 5. Got {bbox}.")
from_bbox_mode = annotation["bbox_mode"]
to_bbox_mode = BoxMode.XYWH_ABS if len(bbox) == 4 else BoxMode.XYWHA_ABS
bbox = BoxMode.convert(bbox, from_bbox_mode, to_bbox_mode)
# COCO requirement: instance area
if "segmentation" in annotation:
# Computing areas for instances by counting the pixels
segmentation = annotation["segmentation"]
# TODO: check segmentation type: RLE, BinaryMask or Polygon
if isinstance(segmentation, list):
polygons = PolygonMasks([segmentation])
area = polygons.area()[0].item()
elif isinstance(segmentation, dict): # RLE
area = mask_util.area(segmentation).item()
else:
raise TypeError(f"Unknown segmentation type {type(segmentation)}!")
else:
# Computing areas using bounding boxes
if to_bbox_mode == BoxMode.XYWH_ABS:
bbox_xy = BoxMode.convert(bbox, to_bbox_mode, BoxMode.XYXY_ABS)
area = Boxes([bbox_xy]).area()[0].item()
else:
area = RotatedBoxes([bbox]).area()[0].item()
if "keypoints" in annotation:
keypoints = annotation["keypoints"] # list[int]
for idx, v in enumerate(keypoints):
if idx % 3 != 2:
# COCO's segmentation coordinates are floating points in [0, H or W],
# but keypoint coordinates are integers in [0, H-1 or W-1]
                    # For COCO format consistency we subtract 0.5
# https://github.com/facebookresearch/detectron2/pull/175#issuecomment-551202163
keypoints[idx] = v - 0.5
if "num_keypoints" in annotation:
num_keypoints = annotation["num_keypoints"]
else:
num_keypoints = sum(kp > 0 for kp in keypoints[2::3])
# COCO requirement:
# linking annotations to images
# "id" field must start with 1
coco_annotation["id"] = len(coco_annotations) + 1
coco_annotation["image_id"] = coco_image["id"]
coco_annotation["bbox"] = [round(float(x), 3) for x in bbox]
coco_annotation["area"] = float(area)
coco_annotation["iscrowd"] = int(annotation.get("iscrowd", 0))
coco_annotation["category_id"] = int(reverse_id_mapper(annotation["category_id"]))
# Add optional fields
if "keypoints" in annotation:
coco_annotation["keypoints"] = keypoints
coco_annotation["num_keypoints"] = num_keypoints
if "segmentation" in annotation:
seg = coco_annotation["segmentation"] = annotation["segmentation"]
if isinstance(seg, dict): # RLE
counts = seg["counts"]
if not isinstance(counts, str):
# make it json-serializable
seg["counts"] = counts.decode("ascii")
coco_annotations.append(coco_annotation)
logger.info(
"Conversion finished, "
f"#images: {len(coco_images)}, #annotations: {len(coco_annotations)}"
)
info = {
"date_created": str(datetime.datetime.now()),
"description": "Automatically generated COCO json file for Detectron2.",
}
coco_dict = {"info": info, "images": coco_images, "categories": categories, "licenses": None}
if len(coco_annotations) > 0:
coco_dict["annotations"] = coco_annotations
return coco_dict
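# --- Illustrative sketch (added for exposition; not part of the original file) ---
# The conversion above normalizes all boxes to COCO's XYWH convention; for
# example, an XYXY box (x0, y0, x1, y1) becomes (x, y, width, height):
def _demo_bbox_to_coco():
    return BoxMode.convert([10.0, 20.0, 50.0, 80.0], BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)
    # -> [10.0, 20.0, 40.0, 60.0]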
def convert_to_coco_json(dataset_name, output_file, allow_cached=True):
"""
Converts dataset into COCO format and saves it to a json file.
dataset_name must be registered in DatasetCatalog and in detectron2's standard format.
Args:
dataset_name:
reference from the config file to the catalogs
must be registered in DatasetCatalog and in detectron2's standard format
output_file: path of json file that will be saved to
allow_cached: if json file is already present then skip conversion
"""
# TODO: The dataset or the conversion script *may* change,
# a checksum would be useful for validating the cached data
PathManager.mkdirs(os.path.dirname(output_file))
with file_lock(output_file):
if PathManager.exists(output_file) and allow_cached:
logger.warning(
f"Using previously cached COCO format annotations at '{output_file}'. "
"You need to clear the cache file if your dataset has been modified."
)
else:
logger.info(f"Converting annotations of dataset '{dataset_name}' to COCO format ...)")
coco_dict = convert_to_coco_dict(dataset_name)
logger.info(f"Caching COCO format annotations at '{output_file}' ...")
tmp_file = output_file + ".tmp"
with PathManager.open(tmp_file, "w") as f:
json.dump(coco_dict, f)
shutil.move(tmp_file, output_file)
def register_coco_instances(name, metadata, json_file, image_root):
"""
Register a dataset in COCO's json annotation format for
instance detection, instance segmentation and keypoint detection.
(i.e., Type 1 and 2 in http://cocodataset.org/#format-data.
`instances*.json` and `person_keypoints*.json` in the dataset).
This is an example of how to register a new dataset.
You can do something similar to this function, to register new datasets.
Args:
name (str): the name that identifies a dataset, e.g. "coco_2014_train".
metadata (dict): extra metadata associated with this dataset. You can
leave it as an empty dict.
json_file (str): path to the json instance annotation file.
image_root (str or path-like): directory which contains all the images.
"""
assert isinstance(name, str), name
assert isinstance(json_file, (str, os.PathLike)), json_file
assert isinstance(image_root, (str, os.PathLike)), image_root
# 1. register a function which returns dicts
DatasetCatalog.register(name, lambda: load_coco_json(json_file, image_root, name))
# 2. Optionally, add metadata about this dataset,
# since they might be useful in evaluation, visualization or logging
MetadataCatalog.get(name).set(
json_file=json_file, image_root=image_root, evaluator_type="coco", **metadata
)
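# --- Illustrative sketch (added for exposition; not part of the original file) ---
# Registering a custom COCO-format dataset; the name and paths below are
# placeholders.
#
#   register_coco_instances(
#       "my_dataset_train", {}, "path/to/annotations.json", "path/to/images"
#   )
#   dicts = DatasetCatalog.get("my_dataset_train")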
if __name__ == "__main__":
"""
Test the COCO json dataset loader.
Usage:
python -m detectron2.data.datasets.coco \
path/to/json path/to/image_root dataset_name
"dataset_name" can be "coco_2014_minival_100", or other
pre-registered ones
"""
from detectron2.utils.logger import setup_logger
from detectron2.utils.visualizer import Visualizer
import detectron2.data.datasets # noqa # add pre-defined metadata
import sys
logger = setup_logger(name=__name__)
assert sys.argv[3] in DatasetCatalog.list()
meta = MetadataCatalog.get(sys.argv[3])
dicts = load_coco_json(sys.argv[1], sys.argv[2], sys.argv[3])
logger.info("Done loading {} samples.".format(len(dicts)))
dirname = "coco-data-vis"
os.makedirs(dirname, exist_ok=True)
for d in dicts:
img = np.array(Image.open(d["file_name"]))
visualizer = Visualizer(img, metadata=meta)
vis = visualizer.draw_dataset_dict(d)
fpath = os.path.join(dirname, os.path.basename(d["file_name"]))
vis.save(fpath)
# === End of banmo-main:third_party/detectron2_old/detectron2/data/datasets/coco.py ===
# Copyright (c) Facebook, Inc. and its affiliates.
from .coco import register_coco_instances # noqa
from .coco_panoptic import register_coco_panoptic_separated # noqa
# === End of banmo-main:third_party/detectron2_old/detectron2/data/datasets/register_coco.py ===
# Copyright (c) Facebook, Inc. and its affiliates.
import functools
import json
import logging
import multiprocessing as mp
import numpy as np
import os
from itertools import chain
import pycocotools.mask as mask_util
from PIL import Image
from detectron2.structures import BoxMode
from detectron2.utils.comm import get_world_size
from detectron2.utils.file_io import PathManager
from detectron2.utils.logger import setup_logger
try:
import cv2 # noqa
except ImportError:
# OpenCV is an optional dependency at the moment
pass
logger = logging.getLogger(__name__)
def _get_cityscapes_files(image_dir, gt_dir):
files = []
# scan through the directory
cities = PathManager.ls(image_dir)
logger.info(f"{len(cities)} cities found in '{image_dir}'.")
for city in cities:
city_img_dir = os.path.join(image_dir, city)
city_gt_dir = os.path.join(gt_dir, city)
for basename in PathManager.ls(city_img_dir):
image_file = os.path.join(city_img_dir, basename)
suffix = "leftImg8bit.png"
assert basename.endswith(suffix), basename
basename = basename[: -len(suffix)]
instance_file = os.path.join(city_gt_dir, basename + "gtFine_instanceIds.png")
label_file = os.path.join(city_gt_dir, basename + "gtFine_labelIds.png")
json_file = os.path.join(city_gt_dir, basename + "gtFine_polygons.json")
files.append((image_file, instance_file, label_file, json_file))
assert len(files), "No images found in {}".format(image_dir)
for f in files[0]:
assert PathManager.isfile(f), f
return files
def load_cityscapes_instances(image_dir, gt_dir, from_json=True, to_polygons=True):
"""
Args:
image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train".
gt_dir (str): path to the raw annotations. e.g., "~/cityscapes/gtFine/train".
from_json (bool): whether to read annotations from the raw json file or the png files.
to_polygons (bool): whether to represent the segmentation as polygons
(COCO's format) instead of masks (cityscapes's format).
Returns:
list[dict]: a list of dicts in Detectron2 standard format. (See
`Using Custom Datasets </tutorials/datasets.html>`_ )
"""
if from_json:
assert to_polygons, (
"Cityscapes's json annotations are in polygon format. "
"Converting to mask format is not supported now."
)
files = _get_cityscapes_files(image_dir, gt_dir)
logger.info("Preprocessing cityscapes annotations ...")
    # This is still not fast: all workers will execute duplicate work, and it can
    # take up to 10 minutes on an 8-GPU server.
pool = mp.Pool(processes=max(mp.cpu_count() // get_world_size() // 2, 4))
ret = pool.map(
functools.partial(_cityscapes_files_to_dict, from_json=from_json, to_polygons=to_polygons),
files,
)
logger.info("Loaded {} images from {}".format(len(ret), image_dir))
    # Map cityscapes ids to contiguous ids
from cityscapesscripts.helpers.labels import labels
labels = [l for l in labels if l.hasInstances and not l.ignoreInEval]
dataset_id_to_contiguous_id = {l.id: idx for idx, l in enumerate(labels)}
for dict_per_image in ret:
for anno in dict_per_image["annotations"]:
anno["category_id"] = dataset_id_to_contiguous_id[anno["category_id"]]
return ret
def load_cityscapes_semantic(image_dir, gt_dir):
"""
Args:
image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train".
gt_dir (str): path to the raw annotations. e.g., "~/cityscapes/gtFine/train".
Returns:
list[dict]: a list of dict, each has "file_name" and
"sem_seg_file_name".
"""
ret = []
    # gt_dir is small and contains many small files; it makes sense to fetch it to local storage first
gt_dir = PathManager.get_local_path(gt_dir)
for image_file, _, label_file, json_file in _get_cityscapes_files(image_dir, gt_dir):
label_file = label_file.replace("labelIds", "labelTrainIds")
with PathManager.open(json_file, "r") as f:
jsonobj = json.load(f)
ret.append(
{
"file_name": image_file,
"sem_seg_file_name": label_file,
"height": jsonobj["imgHeight"],
"width": jsonobj["imgWidth"],
}
)
assert len(ret), f"No images found in {image_dir}!"
assert PathManager.isfile(
ret[0]["sem_seg_file_name"]
), "Please generate labelTrainIds.png with cityscapesscripts/preparation/createTrainIdLabelImgs.py" # noqa
return ret
def _cityscapes_files_to_dict(files, from_json, to_polygons):
"""
    Parse cityscapes annotation files into an instance segmentation dataset dict.
Args:
files (tuple): consists of (image_file, instance_id_file, label_id_file, json_file)
from_json (bool): whether to read annotations from the raw json file or the png files.
to_polygons (bool): whether to represent the segmentation as polygons
(COCO's format) instead of masks (cityscapes's format).
Returns:
A dict in Detectron2 Dataset format.
"""
from cityscapesscripts.helpers.labels import id2label, name2label
image_file, instance_id_file, _, json_file = files
annos = []
if from_json:
from shapely.geometry import MultiPolygon, Polygon
with PathManager.open(json_file, "r") as f:
jsonobj = json.load(f)
ret = {
"file_name": image_file,
"image_id": os.path.basename(image_file),
"height": jsonobj["imgHeight"],
"width": jsonobj["imgWidth"],
}
# `polygons_union` contains the union of all valid polygons.
polygons_union = Polygon()
        # CityscapesScripts draws the polygons in sequential order
        # and each polygon *overwrites* existing ones. See
        # (https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/preparation/json2instanceImg.py) # noqa
        # We use reverse order, and each polygon *avoids* earlier ones.
        # This resolves the polygon overlaps in the same way as CityscapesScripts.
for obj in jsonobj["objects"][::-1]:
if "deleted" in obj: # cityscapes data format specific
continue
label_name = obj["label"]
try:
label = name2label[label_name]
except KeyError:
if label_name.endswith("group"): # crowd area
label = name2label[label_name[: -len("group")]]
else:
raise
if label.id < 0: # cityscapes data format
continue
            # Cityscapes's raw annotations use integer coordinates,
            # therefore we add 0.5 here
poly_coord = np.asarray(obj["polygon"], dtype="f4") + 0.5
            # CityscapesScripts uses PIL.ImageDraw.polygon to rasterize
# polygons for evaluation. This function operates in integer space
# and draws each pixel whose center falls into the polygon.
# Therefore it draws a polygon which is 0.5 "fatter" in expectation.
# We therefore dilate the input polygon by 0.5 as our input.
poly = Polygon(poly_coord).buffer(0.5, resolution=4)
if not label.hasInstances or label.ignoreInEval:
                # even if we won't store the polygon, it still contributes to overlap resolution
polygons_union = polygons_union.union(poly)
continue
# Take non-overlapping part of the polygon
poly_wo_overlaps = poly.difference(polygons_union)
if poly_wo_overlaps.is_empty:
continue
polygons_union = polygons_union.union(poly)
anno = {}
anno["iscrowd"] = label_name.endswith("group")
anno["category_id"] = label.id
if isinstance(poly_wo_overlaps, Polygon):
poly_list = [poly_wo_overlaps]
elif isinstance(poly_wo_overlaps, MultiPolygon):
poly_list = poly_wo_overlaps.geoms
else:
raise NotImplementedError("Unknown geometric structure {}".format(poly_wo_overlaps))
poly_coord = []
for poly_el in poly_list:
# COCO API can work only with exterior boundaries now, hence we store only them.
# TODO: store both exterior and interior boundaries once other parts of the
# codebase support holes in polygons.
poly_coord.append(list(chain(*poly_el.exterior.coords)))
anno["segmentation"] = poly_coord
(xmin, ymin, xmax, ymax) = poly_wo_overlaps.bounds
anno["bbox"] = (xmin, ymin, xmax, ymax)
anno["bbox_mode"] = BoxMode.XYXY_ABS
annos.append(anno)
else:
# See also the official annotation parsing scripts at
# https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/instances2dict.py # noqa
with PathManager.open(instance_id_file, "rb") as f:
inst_image = np.asarray(Image.open(f), order="F")
# ids < 24 are stuff labels (filtering them first is about 5% faster)
flattened_ids = np.unique(inst_image[inst_image >= 24])
ret = {
"file_name": image_file,
"image_id": os.path.basename(image_file),
"height": inst_image.shape[0],
"width": inst_image.shape[1],
}
for instance_id in flattened_ids:
# For non-crowd annotations, instance_id // 1000 is the label_id
# Crowd annotations have <1000 instance ids
label_id = instance_id // 1000 if instance_id >= 1000 else instance_id
label = id2label[label_id]
if not label.hasInstances or label.ignoreInEval:
continue
anno = {}
anno["iscrowd"] = instance_id < 1000
anno["category_id"] = label.id
mask = np.asarray(inst_image == instance_id, dtype=np.uint8, order="F")
inds = np.nonzero(mask)
ymin, ymax = inds[0].min(), inds[0].max()
xmin, xmax = inds[1].min(), inds[1].max()
anno["bbox"] = (xmin, ymin, xmax, ymax)
if xmax <= xmin or ymax <= ymin:
continue
anno["bbox_mode"] = BoxMode.XYXY_ABS
if to_polygons:
# This conversion comes from D4809743 and D5171122,
# when Mask-RCNN was first developed.
contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[
-2
]
polygons = [c.reshape(-1).tolist() for c in contours if len(c) >= 3]
                # opencv can produce invalid polygons
if len(polygons) == 0:
continue
anno["segmentation"] = polygons
else:
anno["segmentation"] = mask_util.encode(mask[:, :, None])[0]
annos.append(anno)
ret["annotations"] = annos
return ret
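# --- Illustrative sketch (added for exposition; not part of the original file) ---
# The reverse-order overlap resolution used above, in isolation: polygons drawn
# later win, so a polygon processed afterwards keeps only the part not covered
# by the running union.
def _demo_overlap_resolution():
    from shapely.geometry import Polygon

    union = Polygon()
    top = Polygon([(1, 0), (3, 0), (3, 2), (1, 2)])  # drawn last, kept whole
    union = union.union(top)
    bottom = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])  # drawn first
    visible = bottom.difference(union)  # only the strip not covered by `top`
    return visible.bounds  # -> (0.0, 0.0, 1.0, 2.0)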
if __name__ == "__main__":
"""
Test the cityscapes dataset loader.
Usage:
python -m detectron2.data.datasets.cityscapes \
cityscapes/leftImg8bit/train cityscapes/gtFine/train
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("image_dir")
parser.add_argument("gt_dir")
parser.add_argument("--type", choices=["instance", "semantic"], default="instance")
args = parser.parse_args()
from detectron2.data.catalog import Metadata
from detectron2.utils.visualizer import Visualizer
from cityscapesscripts.helpers.labels import labels
logger = setup_logger(name=__name__)
dirname = "cityscapes-data-vis"
os.makedirs(dirname, exist_ok=True)
if args.type == "instance":
dicts = load_cityscapes_instances(
args.image_dir, args.gt_dir, from_json=True, to_polygons=True
)
logger.info("Done loading {} samples.".format(len(dicts)))
thing_classes = [k.name for k in labels if k.hasInstances and not k.ignoreInEval]
meta = Metadata().set(thing_classes=thing_classes)
else:
dicts = load_cityscapes_semantic(args.image_dir, args.gt_dir)
logger.info("Done loading {} samples.".format(len(dicts)))
stuff_classes = [k.name for k in labels if k.trainId != 255]
stuff_colors = [k.color for k in labels if k.trainId != 255]
meta = Metadata().set(stuff_classes=stuff_classes, stuff_colors=stuff_colors)
for d in dicts:
img = np.array(Image.open(PathManager.open(d["file_name"], "rb")))
visualizer = Visualizer(img, metadata=meta)
vis = visualizer.draw_dataset_dict(d)
# cv2.imshow("a", vis.get_image()[:, :, ::-1])
# cv2.waitKey()
fpath = os.path.join(dirname, os.path.basename(d["file_name"]))
vis.save(fpath)
# === End of banmo-main:third_party/detectron2_old/detectron2/data/datasets/cityscapes.py ===
# Copyright (c) Facebook, Inc. and its affiliates.
import json
import logging
import os
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.data.datasets.builtin_meta import CITYSCAPES_CATEGORIES
from detectron2.utils.file_io import PathManager
"""
This file contains functions to register the Cityscapes panoptic dataset to the DatasetCatalog.
"""
logger = logging.getLogger(__name__)
def get_cityscapes_panoptic_files(image_dir, gt_dir, json_info):
files = []
# scan through the directory
cities = PathManager.ls(image_dir)
logger.info(f"{len(cities)} cities found in '{image_dir}'.")
image_dict = {}
for city in cities:
city_img_dir = os.path.join(image_dir, city)
for basename in PathManager.ls(city_img_dir):
image_file = os.path.join(city_img_dir, basename)
suffix = "_leftImg8bit.png"
assert basename.endswith(suffix), basename
basename = os.path.basename(basename)[: -len(suffix)]
image_dict[basename] = image_file
for ann in json_info["annotations"]:
image_file = image_dict.get(ann["image_id"], None)
assert image_file is not None, "No image {} found for annotation {}".format(
ann["image_id"], ann["file_name"]
)
label_file = os.path.join(gt_dir, ann["file_name"])
segments_info = ann["segments_info"]
files.append((image_file, label_file, segments_info))
assert len(files), "No images found in {}".format(image_dir)
assert PathManager.isfile(files[0][0]), files[0][0]
assert PathManager.isfile(files[0][1]), files[0][1]
return files
def load_cityscapes_panoptic(image_dir, gt_dir, gt_json, meta):
"""
Args:
image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train".
gt_dir (str): path to the raw annotations. e.g.,
"~/cityscapes/gtFine/cityscapes_panoptic_train".
gt_json (str): path to the json file. e.g.,
"~/cityscapes/gtFine/cityscapes_panoptic_train.json".
meta (dict): dictionary containing "thing_dataset_id_to_contiguous_id"
and "stuff_dataset_id_to_contiguous_id" to map category ids to
contiguous ids for training.
Returns:
list[dict]: a list of dicts in Detectron2 standard format. (See
`Using Custom Datasets </tutorials/datasets.html>`_ )
"""
def _convert_category_id(segment_info, meta):
if segment_info["category_id"] in meta["thing_dataset_id_to_contiguous_id"]:
segment_info["category_id"] = meta["thing_dataset_id_to_contiguous_id"][
segment_info["category_id"]
]
else:
segment_info["category_id"] = meta["stuff_dataset_id_to_contiguous_id"][
segment_info["category_id"]
]
return segment_info
assert os.path.exists(
gt_json
), "Please run `python cityscapesscripts/preparation/createPanopticImgs.py` to generate label files." # noqa
with open(gt_json) as f:
json_info = json.load(f)
files = get_cityscapes_panoptic_files(image_dir, gt_dir, json_info)
ret = []
for image_file, label_file, segments_info in files:
sem_label_file = (
image_file.replace("leftImg8bit", "gtFine").split(".")[0] + "_labelTrainIds.png"
)
segments_info = [_convert_category_id(x, meta) for x in segments_info]
ret.append(
{
"file_name": image_file,
"image_id": "_".join(
os.path.splitext(os.path.basename(image_file))[0].split("_")[:3]
),
"sem_seg_file_name": sem_label_file,
"pan_seg_file_name": label_file,
"segments_info": segments_info,
}
)
assert len(ret), f"No images found in {image_dir}!"
assert PathManager.isfile(
ret[0]["sem_seg_file_name"]
), "Please generate labelTrainIds.png with cityscapesscripts/preparation/createTrainIdLabelImgs.py" # noqa
assert PathManager.isfile(
ret[0]["pan_seg_file_name"]
), "Please generate panoptic annotation with python cityscapesscripts/preparation/createPanopticImgs.py" # noqa
return ret
_RAW_CITYSCAPES_PANOPTIC_SPLITS = {
"cityscapes_fine_panoptic_train": (
"cityscapes/leftImg8bit/train",
"cityscapes/gtFine/cityscapes_panoptic_train",
"cityscapes/gtFine/cityscapes_panoptic_train.json",
),
"cityscapes_fine_panoptic_val": (
"cityscapes/leftImg8bit/val",
"cityscapes/gtFine/cityscapes_panoptic_val",
"cityscapes/gtFine/cityscapes_panoptic_val.json",
),
# "cityscapes_fine_panoptic_test": not supported yet
}
def register_all_cityscapes_panoptic(root):
meta = {}
    # The following metadata maps contiguous ids from [0, #thing categories +
    # #stuff categories) to their names and colors. We keep two replicas of the
    # same names and colors under "thing_*" and "stuff_*" because the current
    # visualization function in D2 handles thing and stuff classes differently
    # due to some heuristic used in Panoptic FPN. We keep the same naming to
    # enable reusing existing visualization functions.
thing_classes = [k["name"] for k in CITYSCAPES_CATEGORIES]
thing_colors = [k["color"] for k in CITYSCAPES_CATEGORIES]
stuff_classes = [k["name"] for k in CITYSCAPES_CATEGORIES]
stuff_colors = [k["color"] for k in CITYSCAPES_CATEGORIES]
meta["thing_classes"] = thing_classes
meta["thing_colors"] = thing_colors
meta["stuff_classes"] = stuff_classes
meta["stuff_colors"] = stuff_colors
# There are three types of ids in cityscapes panoptic segmentation:
# (1) category id: like semantic segmentation, it is the class id for each
# pixel. Since there are some classes not used in evaluation, the category
    # id is not always contiguous and thus we have two sets of category ids:
# - original category id: category id in the original dataset, mainly
# used for evaluation.
# - contiguous category id: [0, #classes), in order to train the classifier
# (2) instance id: this id is used to differentiate different instances from
# the same category. For "stuff" classes, the instance id is always 0; for
# "thing" classes, the instance id starts from 1 and 0 is reserved for
# ignored instances (e.g. crowd annotation).
    # (3) panoptic id: this is the compact id that encodes both category and
    # instance id, by: category_id * 1000 + instance_id.
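    #     An illustrative example (added for exposition): panoptic id
    #     26005 = 26 * 1000 + 5 decodes back to category_id = 26005 // 1000 = 26
    #     and instance_id = 26005 % 1000 = 5.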
thing_dataset_id_to_contiguous_id = {}
stuff_dataset_id_to_contiguous_id = {}
for k in CITYSCAPES_CATEGORIES:
if k["isthing"] == 1:
thing_dataset_id_to_contiguous_id[k["id"]] = k["trainId"]
else:
stuff_dataset_id_to_contiguous_id[k["id"]] = k["trainId"]
meta["thing_dataset_id_to_contiguous_id"] = thing_dataset_id_to_contiguous_id
meta["stuff_dataset_id_to_contiguous_id"] = stuff_dataset_id_to_contiguous_id
for key, (image_dir, gt_dir, gt_json) in _RAW_CITYSCAPES_PANOPTIC_SPLITS.items():
image_dir = os.path.join(root, image_dir)
gt_dir = os.path.join(root, gt_dir)
gt_json = os.path.join(root, gt_json)
DatasetCatalog.register(
key, lambda x=image_dir, y=gt_dir, z=gt_json: load_cityscapes_panoptic(x, y, z, meta)
)
MetadataCatalog.get(key).set(
panoptic_root=gt_dir,
image_root=image_dir,
panoptic_json=gt_json,
gt_dir=gt_dir.replace("cityscapes_panoptic_", ""),
evaluator_type="cityscapes_panoptic_seg",
ignore_label=255,
label_divisor=1000,
**meta,
)
# === End of banmo-main:third_party/detectron2_old/detectron2/data/datasets/cityscapes_panoptic.py ===
# Copyright (c) Facebook, Inc. and its affiliates.
# Autogen with
# with open("lvis_v1_val.json", "r") as f:
# a = json.load(f)
# c = a["categories"]
# for x in c:
# del x["image_count"]
# del x["instance_count"]
# LVIS_CATEGORIES = repr(c) + " # noqa"
# with open("/tmp/lvis_categories.py", "wt") as f:
# f.write(f"LVIS_CATEGORIES = {LVIS_CATEGORIES}")
# Then paste the contents of that file below
# fmt: off
LVIS_CATEGORIES = [{'frequency': 'c', 'synset': 'aerosol.n.02', 'synonyms': ['aerosol_can', 'spray_can'], 'id': 1, 'def': 'a dispenser that holds a substance under pressure', 'name': 'aerosol_can'}, {'frequency': 'f', 'synset': 'air_conditioner.n.01', 'synonyms': ['air_conditioner'], 'id': 2, 'def': 'a machine that keeps air cool and dry', 'name': 'air_conditioner'}, {'frequency': 'f', 'synset': 'airplane.n.01', 'synonyms': ['airplane', 'aeroplane'], 'id': 3, 'def': 'an aircraft that has a fixed wing and is powered by propellers or jets', 'name': 'airplane'}, {'frequency': 'f', 'synset': 'alarm_clock.n.01', 'synonyms': ['alarm_clock'], 'id': 4, 'def': 'a clock that wakes a sleeper at some preset time', 'name': 'alarm_clock'}, {'frequency': 'c', 'synset': 'alcohol.n.01', 'synonyms': ['alcohol', 'alcoholic_beverage'], 'id': 5, 'def': 'a liquor or brew containing alcohol as the active agent', 'name': 'alcohol'}, {'frequency': 'c', 'synset': 'alligator.n.02', 'synonyms': ['alligator', 'gator'], 'id': 6, 'def': 'amphibious reptiles related to crocodiles but with shorter broader snouts', 'name': 'alligator'}, {'frequency': 'c', 'synset': 'almond.n.02', 'synonyms': ['almond'], 'id': 7, 'def': 'oval-shaped edible seed of the almond tree', 'name': 'almond'}, {'frequency': 'c', 'synset': 'ambulance.n.01', 'synonyms': ['ambulance'], 'id': 8, 'def': 'a vehicle that takes people to and from hospitals', 'name': 'ambulance'}, {'frequency': 'c', 'synset': 'amplifier.n.01', 'synonyms': ['amplifier'], 'id': 9, 'def': 'electronic equipment that increases strength of signals', 'name': 'amplifier'}, {'frequency': 'c', 'synset': 'anklet.n.03', 'synonyms': ['anklet', 'ankle_bracelet'], 'id': 10, 'def': 'an ornament worn around the ankle', 'name': 'anklet'}, {'frequency': 'f', 'synset': 'antenna.n.01', 'synonyms': ['antenna', 'aerial', 'transmitting_aerial'], 'id': 11, 'def': 'an electrical device that sends or receives radio or television signals', 'name': 'antenna'}, {'frequency': 'f', 'synset': 'apple.n.01', 'synonyms': ['apple'], 'id': 12, 'def': 'fruit with red or yellow or green skin and sweet to tart crisp whitish flesh', 'name': 'apple'}, {'frequency': 'r', 'synset': 'applesauce.n.01', 'synonyms': ['applesauce'], 'id': 13, 'def': 'puree of stewed apples usually sweetened and spiced', 'name': 'applesauce'}, {'frequency': 'r', 'synset': 'apricot.n.02', 'synonyms': ['apricot'], 'id': 14, 'def': 'downy yellow to rosy-colored fruit resembling a small peach', 'name': 'apricot'}, {'frequency': 'f', 'synset': 'apron.n.01', 'synonyms': ['apron'], 'id': 15, 'def': 'a garment of cloth that is tied about the waist and worn to protect clothing', 'name': 'apron'}, {'frequency': 'c', 'synset': 'aquarium.n.01', 'synonyms': ['aquarium', 'fish_tank'], 'id': 16, 'def': 'a tank/pool/bowl filled with water for keeping live fish and underwater animals', 'name': 'aquarium'}, {'frequency': 'r', 'synset': 'arctic.n.02', 'synonyms': ['arctic_(type_of_shoe)', 'galosh', 'golosh', 'rubber_(type_of_shoe)', 'gumshoe'], 'id': 17, 'def': 'a waterproof overshoe that protects shoes from water or snow', 'name': 'arctic_(type_of_shoe)'}, {'frequency': 'c', 'synset': 'armband.n.02', 'synonyms': ['armband'], 'id': 18, 'def': 'a band worn around the upper arm', 'name': 'armband'}, {'frequency': 'f', 'synset': 'armchair.n.01', 'synonyms': ['armchair'], 'id': 19, 'def': 'chair with a support on each side for arms', 'name': 'armchair'}, {'frequency': 'r', 'synset': 'armoire.n.01', 'synonyms': ['armoire'], 'id': 20, 'def': 'a large wardrobe or 
cabinet', 'name': 'armoire'}, {'frequency': 'r', 'synset': 'armor.n.01', 'synonyms': ['armor', 'armour'], 'id': 21, 'def': 'protective covering made of metal and used in combat', 'name': 'armor'}, {'frequency': 'c', 'synset': 'artichoke.n.02', 'synonyms': ['artichoke'], 'id': 22, 'def': 'a thistlelike flower head with edible fleshy leaves and heart', 'name': 'artichoke'}, {'frequency': 'f', 'synset': 'ashcan.n.01', 'synonyms': ['trash_can', 'garbage_can', 'wastebin', 'dustbin', 'trash_barrel', 'trash_bin'], 'id': 23, 'def': 'a bin that holds rubbish until it is collected', 'name': 'trash_can'}, {'frequency': 'c', 'synset': 'ashtray.n.01', 'synonyms': ['ashtray'], 'id': 24, 'def': "a receptacle for the ash from smokers' cigars or cigarettes", 'name': 'ashtray'}, {'frequency': 'c', 'synset': 'asparagus.n.02', 'synonyms': ['asparagus'], 'id': 25, 'def': 'edible young shoots of the asparagus plant', 'name': 'asparagus'}, {'frequency': 'c', 'synset': 'atomizer.n.01', 'synonyms': ['atomizer', 'atomiser', 'spray', 'sprayer', 'nebulizer', 'nebuliser'], 'id': 26, 'def': 'a dispenser that turns a liquid (such as perfume) into a fine mist', 'name': 'atomizer'}, {'frequency': 'f', 'synset': 'avocado.n.01', 'synonyms': ['avocado'], 'id': 27, 'def': 'a pear-shaped fruit with green or blackish skin and rich yellowish pulp enclosing a single large seed', 'name': 'avocado'}, {'frequency': 'c', 'synset': 'award.n.02', 'synonyms': ['award', 'accolade'], 'id': 28, 'def': 'a tangible symbol signifying approval or distinction', 'name': 'award'}, {'frequency': 'f', 'synset': 'awning.n.01', 'synonyms': ['awning'], 'id': 29, 'def': 'a canopy made of canvas to shelter people or things from rain or sun', 'name': 'awning'}, {'frequency': 'r', 'synset': 'ax.n.01', 'synonyms': ['ax', 'axe'], 'id': 30, 'def': 'an edge tool with a heavy bladed head mounted across a handle', 'name': 'ax'}, {'frequency': 'r', 'synset': 'baboon.n.01', 'synonyms': ['baboon'], 'id': 31, 'def': 'large terrestrial monkeys having doglike muzzles', 'name': 'baboon'}, {'frequency': 'f', 'synset': 'baby_buggy.n.01', 'synonyms': ['baby_buggy', 'baby_carriage', 'perambulator', 'pram', 'stroller'], 'id': 32, 'def': 'a small vehicle with four wheels in which a baby or child is pushed around', 'name': 'baby_buggy'}, {'frequency': 'c', 'synset': 'backboard.n.01', 'synonyms': ['basketball_backboard'], 'id': 33, 'def': 'a raised vertical board with basket attached; used to play basketball', 'name': 'basketball_backboard'}, {'frequency': 'f', 'synset': 'backpack.n.01', 'synonyms': ['backpack', 'knapsack', 'packsack', 'rucksack', 'haversack'], 'id': 34, 'def': 'a bag carried by a strap on your back or shoulder', 'name': 'backpack'}, {'frequency': 'f', 'synset': 'bag.n.04', 'synonyms': ['handbag', 'purse', 'pocketbook'], 'id': 35, 'def': 'a container used for carrying money and small personal items or accessories', 'name': 'handbag'}, {'frequency': 'f', 'synset': 'bag.n.06', 'synonyms': ['suitcase', 'baggage', 'luggage'], 'id': 36, 'def': 'cases used to carry belongings when traveling', 'name': 'suitcase'}, {'frequency': 'c', 'synset': 'bagel.n.01', 'synonyms': ['bagel', 'beigel'], 'id': 37, 'def': 'glazed yeast-raised doughnut-shaped roll with hard crust', 'name': 'bagel'}, {'frequency': 'r', 'synset': 'bagpipe.n.01', 'synonyms': ['bagpipe'], 'id': 38, 'def': 'a tubular wind instrument; the player blows air into a bag and squeezes it out', 'name': 'bagpipe'}, {'frequency': 'r', 'synset': 'baguet.n.01', 'synonyms': ['baguet', 'baguette'], 'id': 39, 'def': 
'narrow French stick loaf', 'name': 'baguet'}, {'frequency': 'r', 'synset': 'bait.n.02', 'synonyms': ['bait', 'lure'], 'id': 40, 'def': 'something used to lure fish or other animals into danger so they can be trapped or killed', 'name': 'bait'}, {'frequency': 'f', 'synset': 'ball.n.06', 'synonyms': ['ball'], 'id': 41, 'def': 'a spherical object used as a plaything', 'name': 'ball'}, {'frequency': 'r', 'synset': 'ballet_skirt.n.01', 'synonyms': ['ballet_skirt', 'tutu'], 'id': 42, 'def': 'very short skirt worn by ballerinas', 'name': 'ballet_skirt'}, {'frequency': 'f', 'synset': 'balloon.n.01', 'synonyms': ['balloon'], 'id': 43, 'def': 'large tough nonrigid bag filled with gas or heated air', 'name': 'balloon'}, {'frequency': 'c', 'synset': 'bamboo.n.02', 'synonyms': ['bamboo'], 'id': 44, 'def': 'woody tropical grass having hollow woody stems', 'name': 'bamboo'}, {'frequency': 'f', 'synset': 'banana.n.02', 'synonyms': ['banana'], 'id': 45, 'def': 'elongated crescent-shaped yellow fruit with soft sweet flesh', 'name': 'banana'}, {'frequency': 'c', 'synset': 'band_aid.n.01', 'synonyms': ['Band_Aid'], 'id': 46, 'def': 'trade name for an adhesive bandage to cover small cuts or blisters', 'name': 'Band_Aid'}, {'frequency': 'c', 'synset': 'bandage.n.01', 'synonyms': ['bandage'], 'id': 47, 'def': 'a piece of soft material that covers and protects an injured part of the body', 'name': 'bandage'}, {'frequency': 'f', 'synset': 'bandanna.n.01', 'synonyms': ['bandanna', 'bandana'], 'id': 48, 'def': 'large and brightly colored handkerchief; often used as a neckerchief', 'name': 'bandanna'}, {'frequency': 'r', 'synset': 'banjo.n.01', 'synonyms': ['banjo'], 'id': 49, 'def': 'a stringed instrument of the guitar family with a long neck and circular body', 'name': 'banjo'}, {'frequency': 'f', 'synset': 'banner.n.01', 'synonyms': ['banner', 'streamer'], 'id': 50, 'def': 'long strip of cloth or paper used for decoration or advertising', 'name': 'banner'}, {'frequency': 'r', 'synset': 'barbell.n.01', 'synonyms': ['barbell'], 'id': 51, 'def': 'a bar to which heavy discs are attached at each end; used in weightlifting', 'name': 'barbell'}, {'frequency': 'r', 'synset': 'barge.n.01', 'synonyms': ['barge'], 'id': 52, 'def': 'a flatbottom boat for carrying heavy loads (especially on canals)', 'name': 'barge'}, {'frequency': 'f', 'synset': 'barrel.n.02', 'synonyms': ['barrel', 'cask'], 'id': 53, 'def': 'a cylindrical container that holds liquids', 'name': 'barrel'}, {'frequency': 'c', 'synset': 'barrette.n.01', 'synonyms': ['barrette'], 'id': 54, 'def': "a pin for holding women's hair in place", 'name': 'barrette'}, {'frequency': 'c', 'synset': 'barrow.n.03', 'synonyms': ['barrow', 'garden_cart', 'lawn_cart', 'wheelbarrow'], 'id': 55, 'def': 'a cart for carrying small loads; has handles and one or more wheels', 'name': 'barrow'}, {'frequency': 'f', 'synset': 'base.n.03', 'synonyms': ['baseball_base'], 'id': 56, 'def': 'a place that the runner must touch before scoring', 'name': 'baseball_base'}, {'frequency': 'f', 'synset': 'baseball.n.02', 'synonyms': ['baseball'], 'id': 57, 'def': 'a ball used in playing baseball', 'name': 'baseball'}, {'frequency': 'f', 'synset': 'baseball_bat.n.01', 'synonyms': ['baseball_bat'], 'id': 58, 'def': 'an implement used in baseball by the batter', 'name': 'baseball_bat'}, {'frequency': 'f', 'synset': 'baseball_cap.n.01', 'synonyms': ['baseball_cap', 'jockey_cap', 'golf_cap'], 'id': 59, 'def': 'a cap with a bill', 'name': 'baseball_cap'}, {'frequency': 'f', 'synset': 
'baseball_glove.n.01', 'synonyms': ['baseball_glove', 'baseball_mitt'], 'id': 60, 'def': 'the handwear used by fielders in playing baseball', 'name': 'baseball_glove'}, {'frequency': 'f', 'synset': 'basket.n.01', 'synonyms': ['basket', 'handbasket'], 'id': 61, 'def': 'a container that is usually woven and has handles', 'name': 'basket'}, {'frequency': 'c', 'synset': 'basketball.n.02', 'synonyms': ['basketball'], 'id': 62, 'def': 'an inflated ball used in playing basketball', 'name': 'basketball'}, {'frequency': 'r', 'synset': 'bass_horn.n.01', 'synonyms': ['bass_horn', 'sousaphone', 'tuba'], 'id': 63, 'def': 'the lowest brass wind instrument', 'name': 'bass_horn'}, {'frequency': 'c', 'synset': 'bat.n.01', 'synonyms': ['bat_(animal)'], 'id': 64, 'def': 'nocturnal mouselike mammal with forelimbs modified to form membranous wings', 'name': 'bat_(animal)'}, {'frequency': 'f', 'synset': 'bath_mat.n.01', 'synonyms': ['bath_mat'], 'id': 65, 'def': 'a heavy towel or mat to stand on while drying yourself after a bath', 'name': 'bath_mat'}, {'frequency': 'f', 'synset': 'bath_towel.n.01', 'synonyms': ['bath_towel'], 'id': 66, 'def': 'a large towel; to dry yourself after a bath', 'name': 'bath_towel'}, {'frequency': 'c', 'synset': 'bathrobe.n.01', 'synonyms': ['bathrobe'], 'id': 67, 'def': 'a loose-fitting robe of towelling; worn after a bath or swim', 'name': 'bathrobe'}, {'frequency': 'f', 'synset': 'bathtub.n.01', 'synonyms': ['bathtub', 'bathing_tub'], 'id': 68, 'def': 'a large open container that you fill with water and use to wash the body', 'name': 'bathtub'}, {'frequency': 'r', 'synset': 'batter.n.02', 'synonyms': ['batter_(food)'], 'id': 69, 'def': 'a liquid or semiliquid mixture, as of flour, eggs, and milk, used in cooking', 'name': 'batter_(food)'}, {'frequency': 'c', 'synset': 'battery.n.02', 'synonyms': ['battery'], 'id': 70, 'def': 'a portable device that produces electricity', 'name': 'battery'}, {'frequency': 'r', 'synset': 'beach_ball.n.01', 'synonyms': ['beachball'], 'id': 71, 'def': 'large and light ball; for play at the seaside', 'name': 'beachball'}, {'frequency': 'c', 'synset': 'bead.n.01', 'synonyms': ['bead'], 'id': 72, 'def': 'a small ball with a hole through the middle used for ornamentation, jewellery, etc.', 'name': 'bead'}, {'frequency': 'c', 'synset': 'bean_curd.n.01', 'synonyms': ['bean_curd', 'tofu'], 'id': 73, 'def': 'cheeselike food made of curdled soybean milk', 'name': 'bean_curd'}, {'frequency': 'c', 'synset': 'beanbag.n.01', 'synonyms': ['beanbag'], 'id': 74, 'def': 'a bag filled with dried beans or similar items; used in games or to sit on', 'name': 'beanbag'}, {'frequency': 'f', 'synset': 'beanie.n.01', 'synonyms': ['beanie', 'beany'], 'id': 75, 'def': 'a small skullcap; formerly worn by schoolboys and college freshmen', 'name': 'beanie'}, {'frequency': 'f', 'synset': 'bear.n.01', 'synonyms': ['bear'], 'id': 76, 'def': 'large carnivorous or omnivorous mammals with shaggy coats and claws', 'name': 'bear'}, {'frequency': 'f', 'synset': 'bed.n.01', 'synonyms': ['bed'], 'id': 77, 'def': 'a piece of furniture that provides a place to sleep', 'name': 'bed'}, {'frequency': 'r', 'synset': 'bedpan.n.01', 'synonyms': ['bedpan'], 'id': 78, 'def': 'a shallow vessel used by a bedridden patient for defecation and urination', 'name': 'bedpan'}, {'frequency': 'f', 'synset': 'bedspread.n.01', 'synonyms': ['bedspread', 'bedcover', 'bed_covering', 'counterpane', 'spread'], 'id': 79, 'def': 'decorative cover for a bed', 'name': 'bedspread'}, {'frequency': 'f', 'synset': 
'beef.n.01', 'synonyms': ['cow'], 'id': 80, 'def': 'cattle/cow', 'name': 'cow'}, {'frequency': 'f', 'synset': 'beef.n.02', 'synonyms': ['beef_(food)', 'boeuf_(food)'], 'id': 81, 'def': 'meat from an adult domestic bovine', 'name': 'beef_(food)'}, {'frequency': 'r', 'synset': 'beeper.n.01', 'synonyms': ['beeper', 'pager'], 'id': 82, 'def': 'a device that beeps when the person carrying it is being paged', 'name': 'beeper'}, {'frequency': 'f', 'synset': 'beer_bottle.n.01', 'synonyms': ['beer_bottle'], 'id': 83, 'def': 'a bottle that holds beer', 'name': 'beer_bottle'}, {'frequency': 'c', 'synset': 'beer_can.n.01', 'synonyms': ['beer_can'], 'id': 84, 'def': 'a can that holds beer', 'name': 'beer_can'}, {'frequency': 'r', 'synset': 'beetle.n.01', 'synonyms': ['beetle'], 'id': 85, 'def': 'insect with hard wing covers', 'name': 'beetle'}, {'frequency': 'f', 'synset': 'bell.n.01', 'synonyms': ['bell'], 'id': 86, 'def': 'a hollow device made of metal that makes a ringing sound when struck', 'name': 'bell'}, {'frequency': 'f', 'synset': 'bell_pepper.n.02', 'synonyms': ['bell_pepper', 'capsicum'], 'id': 87, 'def': 'large bell-shaped sweet pepper in green or red or yellow or orange or black varieties', 'name': 'bell_pepper'}, {'frequency': 'f', 'synset': 'belt.n.02', 'synonyms': ['belt'], 'id': 88, 'def': 'a band to tie or buckle around the body (usually at the waist)', 'name': 'belt'}, {'frequency': 'f', 'synset': 'belt_buckle.n.01', 'synonyms': ['belt_buckle'], 'id': 89, 'def': 'the buckle used to fasten a belt', 'name': 'belt_buckle'}, {'frequency': 'f', 'synset': 'bench.n.01', 'synonyms': ['bench'], 'id': 90, 'def': 'a long seat for more than one person', 'name': 'bench'}, {'frequency': 'c', 'synset': 'beret.n.01', 'synonyms': ['beret'], 'id': 91, 'def': 'a cap with no brim or bill; made of soft cloth', 'name': 'beret'}, {'frequency': 'c', 'synset': 'bib.n.02', 'synonyms': ['bib'], 'id': 92, 'def': 'a napkin tied under the chin of a child while eating', 'name': 'bib'}, {'frequency': 'r', 'synset': 'bible.n.01', 'synonyms': ['Bible'], 'id': 93, 'def': 'the sacred writings of the Christian religions', 'name': 'Bible'}, {'frequency': 'f', 'synset': 'bicycle.n.01', 'synonyms': ['bicycle', 'bike_(bicycle)'], 'id': 94, 'def': 'a wheeled vehicle that has two wheels and is moved by foot pedals', 'name': 'bicycle'}, {'frequency': 'f', 'synset': 'bill.n.09', 'synonyms': ['visor', 'vizor'], 'id': 95, 'def': 'a brim that projects to the front to shade the eyes', 'name': 'visor'}, {'frequency': 'f', 'synset': 'billboard.n.01', 'synonyms': ['billboard'], 'id': 96, 'def': 'large outdoor signboard', 'name': 'billboard'}, {'frequency': 'c', 'synset': 'binder.n.03', 'synonyms': ['binder', 'ring-binder'], 'id': 97, 'def': 'holds loose papers or magazines', 'name': 'binder'}, {'frequency': 'c', 'synset': 'binoculars.n.01', 'synonyms': ['binoculars', 'field_glasses', 'opera_glasses'], 'id': 98, 'def': 'an optical instrument designed for simultaneous use by both eyes', 'name': 'binoculars'}, {'frequency': 'f', 'synset': 'bird.n.01', 'synonyms': ['bird'], 'id': 99, 'def': 'animal characterized by feathers and wings', 'name': 'bird'}, {'frequency': 'c', 'synset': 'bird_feeder.n.01', 'synonyms': ['birdfeeder'], 'id': 100, 'def': 'an outdoor device that supplies food for wild birds', 'name': 'birdfeeder'}, {'frequency': 'c', 'synset': 'birdbath.n.01', 'synonyms': ['birdbath'], 'id': 101, 'def': 'an ornamental basin (usually in a garden) for birds to bathe in', 'name': 'birdbath'}, {'frequency': 'c', 'synset': 
'birdcage.n.01', 'synonyms': ['birdcage'], 'id': 102, 'def': 'a cage in which a bird can be kept', 'name': 'birdcage'}, {'frequency': 'c', 'synset': 'birdhouse.n.01', 'synonyms': ['birdhouse'], 'id': 103, 'def': 'a shelter for birds', 'name': 'birdhouse'}, {'frequency': 'f', 'synset': 'birthday_cake.n.01', 'synonyms': ['birthday_cake'], 'id': 104, 'def': 'decorated cake served at a birthday party', 'name': 'birthday_cake'}, {'frequency': 'r', 'synset': 'birthday_card.n.01', 'synonyms': ['birthday_card'], 'id': 105, 'def': 'a card expressing a birthday greeting', 'name': 'birthday_card'}, {'frequency': 'r', 'synset': 'black_flag.n.01', 'synonyms': ['pirate_flag'], 'id': 106, 'def': 'a flag usually bearing a white skull and crossbones on a black background', 'name': 'pirate_flag'}, {'frequency': 'c', 'synset': 'black_sheep.n.02', 'synonyms': ['black_sheep'], 'id': 107, 'def': 'sheep with a black coat', 'name': 'black_sheep'}, {'frequency': 'c', 'synset': 'blackberry.n.01', 'synonyms': ['blackberry'], 'id': 108, 'def': 'large sweet black or very dark purple edible aggregate fruit', 'name': 'blackberry'}, {'frequency': 'f', 'synset': 'blackboard.n.01', 'synonyms': ['blackboard', 'chalkboard'], 'id': 109, 'def': 'sheet of slate; for writing with chalk', 'name': 'blackboard'}, {'frequency': 'f', 'synset': 'blanket.n.01', 'synonyms': ['blanket'], 'id': 110, 'def': 'bedding that keeps a person warm in bed', 'name': 'blanket'}, {'frequency': 'c', 'synset': 'blazer.n.01', 'synonyms': ['blazer', 'sport_jacket', 'sport_coat', 'sports_jacket', 'sports_coat'], 'id': 111, 'def': 'lightweight jacket; often striped in the colors of a club or school', 'name': 'blazer'}, {'frequency': 'f', 'synset': 'blender.n.01', 'synonyms': ['blender', 'liquidizer', 'liquidiser'], 'id': 112, 'def': 'an electrically powered mixer that mixes or chops or liquefies foods', 'name': 'blender'}, {'frequency': 'r', 'synset': 'blimp.n.02', 'synonyms': ['blimp'], 'id': 113, 'def': 'a small nonrigid airship used for observation or as a barrage balloon', 'name': 'blimp'}, {'frequency': 'f', 'synset': 'blinker.n.01', 'synonyms': ['blinker', 'flasher'], 'id': 114, 'def': 'a light that flashes on and off; used as a signal or to send messages', 'name': 'blinker'}, {'frequency': 'f', 'synset': 'blouse.n.01', 'synonyms': ['blouse'], 'id': 115, 'def': 'a top worn by women', 'name': 'blouse'}, {'frequency': 'f', 'synset': 'blueberry.n.02', 'synonyms': ['blueberry'], 'id': 116, 'def': 'sweet edible dark-blue berries of blueberry plants', 'name': 'blueberry'}, {'frequency': 'r', 'synset': 'board.n.09', 'synonyms': ['gameboard'], 'id': 117, 'def': 'a flat portable surface (usually rectangular) designed for board games', 'name': 'gameboard'}, {'frequency': 'f', 'synset': 'boat.n.01', 'synonyms': ['boat', 'ship_(boat)'], 'id': 118, 'def': 'a vessel for travel on water', 'name': 'boat'}, {'frequency': 'r', 'synset': 'bob.n.05', 'synonyms': ['bob', 'bobber', 'bobfloat'], 'id': 119, 'def': 'a small float usually made of cork; attached to a fishing line', 'name': 'bob'}, {'frequency': 'c', 'synset': 'bobbin.n.01', 'synonyms': ['bobbin', 'spool', 'reel'], 'id': 120, 'def': 'a thing around which thread/tape/film or other flexible materials can be wound', 'name': 'bobbin'}, {'frequency': 'c', 'synset': 'bobby_pin.n.01', 'synonyms': ['bobby_pin', 'hairgrip'], 'id': 121, 'def': 'a flat wire hairpin used to hold bobbed hair in place', 'name': 'bobby_pin'}, {'frequency': 'c', 'synset': 'boiled_egg.n.01', 'synonyms': ['boiled_egg', 'coddled_egg'], 'id': 122, 
'def': 'egg cooked briefly in the shell in gently boiling water', 'name': 'boiled_egg'}, {'frequency': 'r', 'synset': 'bolo_tie.n.01', 'synonyms': ['bolo_tie', 'bolo', 'bola_tie', 'bola'], 'id': 123, 'def': 'a cord fastened around the neck with an ornamental clasp and worn as a necktie', 'name': 'bolo_tie'}, {'frequency': 'c', 'synset': 'bolt.n.03', 'synonyms': ['deadbolt'], 'id': 124, 'def': 'the part of a lock that is engaged or withdrawn with a key', 'name': 'deadbolt'}, {'frequency': 'f', 'synset': 'bolt.n.06', 'synonyms': ['bolt'], 'id': 125, 'def': 'a screw that screws into a nut to form a fastener', 'name': 'bolt'}, {'frequency': 'r', 'synset': 'bonnet.n.01', 'synonyms': ['bonnet'], 'id': 126, 'def': 'a hat tied under the chin', 'name': 'bonnet'}, {'frequency': 'f', 'synset': 'book.n.01', 'synonyms': ['book'], 'id': 127, 'def': 'a written work or composition that has been published', 'name': 'book'}, {'frequency': 'c', 'synset': 'bookcase.n.01', 'synonyms': ['bookcase'], 'id': 128, 'def': 'a piece of furniture with shelves for storing books', 'name': 'bookcase'}, {'frequency': 'c', 'synset': 'booklet.n.01', 'synonyms': ['booklet', 'brochure', 'leaflet', 'pamphlet'], 'id': 129, 'def': 'a small book usually having a paper cover', 'name': 'booklet'}, {'frequency': 'r', 'synset': 'bookmark.n.01', 'synonyms': ['bookmark', 'bookmarker'], 'id': 130, 'def': 'a marker (a piece of paper or ribbon) placed between the pages of a book', 'name': 'bookmark'}, {'frequency': 'r', 'synset': 'boom.n.04', 'synonyms': ['boom_microphone', 'microphone_boom'], 'id': 131, 'def': 'a pole carrying an overhead microphone projected over a film or tv set', 'name': 'boom_microphone'}, {'frequency': 'f', 'synset': 'boot.n.01', 'synonyms': ['boot'], 'id': 132, 'def': 'footwear that covers the whole foot and lower leg', 'name': 'boot'}, {'frequency': 'f', 'synset': 'bottle.n.01', 'synonyms': ['bottle'], 'id': 133, 'def': 'a glass or plastic vessel used for storing drinks or other liquids', 'name': 'bottle'}, {'frequency': 'c', 'synset': 'bottle_opener.n.01', 'synonyms': ['bottle_opener'], 'id': 134, 'def': 'an opener for removing caps or corks from bottles', 'name': 'bottle_opener'}, {'frequency': 'c', 'synset': 'bouquet.n.01', 'synonyms': ['bouquet'], 'id': 135, 'def': 'an arrangement of flowers that is usually given as a present', 'name': 'bouquet'}, {'frequency': 'r', 'synset': 'bow.n.04', 'synonyms': ['bow_(weapon)'], 'id': 136, 'def': 'a weapon for shooting arrows', 'name': 'bow_(weapon)'}, {'frequency': 'f', 'synset': 'bow.n.08', 'synonyms': ['bow_(decorative_ribbons)'], 'id': 137, 'def': 'a decorative interlacing of ribbons', 'name': 'bow_(decorative_ribbons)'}, {'frequency': 'f', 'synset': 'bow_tie.n.01', 'synonyms': ['bow-tie', 'bowtie'], 'id': 138, 'def': "a man's tie that ties in a bow", 'name': 'bow-tie'}, {'frequency': 'f', 'synset': 'bowl.n.03', 'synonyms': ['bowl'], 'id': 139, 'def': 'a dish that is round and open at the top for serving foods', 'name': 'bowl'}, {'frequency': 'r', 'synset': 'bowl.n.08', 'synonyms': ['pipe_bowl'], 'id': 140, 'def': 'a small round container that is open at the top for holding tobacco', 'name': 'pipe_bowl'}, {'frequency': 'c', 'synset': 'bowler_hat.n.01', 'synonyms': ['bowler_hat', 'bowler', 'derby_hat', 'derby', 'plug_hat'], 'id': 141, 'def': 'a felt hat that is round and hard with a narrow brim', 'name': 'bowler_hat'}, {'frequency': 'r', 'synset': 'bowling_ball.n.01', 'synonyms': ['bowling_ball'], 'id': 142, 'def': 'a large ball with finger holes used in the sport of 
bowling', 'name': 'bowling_ball'}, {'frequency': 'f', 'synset': 'box.n.01', 'synonyms': ['box'], 'id': 143, 'def': 'a (usually rectangular) container; may have a lid', 'name': 'box'}, {'frequency': 'r', 'synset': 'boxing_glove.n.01', 'synonyms': ['boxing_glove'], 'id': 144, 'def': 'large glove covering the fists of a fighter worn for the sport of boxing', 'name': 'boxing_glove'}, {'frequency': 'c', 'synset': 'brace.n.06', 'synonyms': ['suspenders'], 'id': 145, 'def': 'elastic straps that hold trousers up (usually used in the plural)', 'name': 'suspenders'}, {'frequency': 'f', 'synset': 'bracelet.n.02', 'synonyms': ['bracelet', 'bangle'], 'id': 146, 'def': 'jewelry worn around the wrist for decoration', 'name': 'bracelet'}, {'frequency': 'r', 'synset': 'brass.n.07', 'synonyms': ['brass_plaque'], 'id': 147, 'def': 'a memorial made of brass', 'name': 'brass_plaque'}, {'frequency': 'c', 'synset': 'brassiere.n.01', 'synonyms': ['brassiere', 'bra', 'bandeau'], 'id': 148, 'def': 'an undergarment worn by women to support their breasts', 'name': 'brassiere'}, {'frequency': 'c', 'synset': 'bread-bin.n.01', 'synonyms': ['bread-bin', 'breadbox'], 'id': 149, 'def': 'a container used to keep bread or cake in', 'name': 'bread-bin'}, {'frequency': 'f', 'synset': 'bread.n.01', 'synonyms': ['bread'], 'id': 150, 'def': 'food made from dough of flour or meal and usually raised with yeast or baking powder and then baked', 'name': 'bread'}, {'frequency': 'r', 'synset': 'breechcloth.n.01', 'synonyms': ['breechcloth', 'breechclout', 'loincloth'], 'id': 151, 'def': 'a garment that provides covering for the loins', 'name': 'breechcloth'}, {'frequency': 'f', 'synset': 'bridal_gown.n.01', 'synonyms': ['bridal_gown', 'wedding_gown', 'wedding_dress'], 'id': 152, 'def': 'a gown worn by the bride at a wedding', 'name': 'bridal_gown'}, {'frequency': 'c', 'synset': 'briefcase.n.01', 'synonyms': ['briefcase'], 'id': 153, 'def': 'a case with a handle; for carrying papers or files or books', 'name': 'briefcase'}, {'frequency': 'f', 'synset': 'broccoli.n.01', 'synonyms': ['broccoli'], 'id': 154, 'def': 'plant with dense clusters of tight green flower buds', 'name': 'broccoli'}, {'frequency': 'r', 'synset': 'brooch.n.01', 'synonyms': ['broach'], 'id': 155, 'def': 'a decorative pin worn by women', 'name': 'broach'}, {'frequency': 'c', 'synset': 'broom.n.01', 'synonyms': ['broom'], 'id': 156, 'def': 'bundle of straws or twigs attached to a long handle; used for cleaning', 'name': 'broom'}, {'frequency': 'c', 'synset': 'brownie.n.03', 'synonyms': ['brownie'], 'id': 157, 'def': 'square or bar of very rich chocolate cake usually with nuts', 'name': 'brownie'}, {'frequency': 'c', 'synset': 'brussels_sprouts.n.01', 'synonyms': ['brussels_sprouts'], 'id': 158, 'def': 'the small edible cabbage-like buds growing along a stalk', 'name': 'brussels_sprouts'}, {'frequency': 'r', 'synset': 'bubble_gum.n.01', 'synonyms': ['bubble_gum'], 'id': 159, 'def': 'a kind of chewing gum that can be blown into bubbles', 'name': 'bubble_gum'}, {'frequency': 'f', 'synset': 'bucket.n.01', 'synonyms': ['bucket', 'pail'], 'id': 160, 'def': 'a roughly cylindrical vessel that is open at the top', 'name': 'bucket'}, {'frequency': 'r', 'synset': 'buggy.n.01', 'synonyms': ['horse_buggy'], 'id': 161, 'def': 'a small lightweight carriage; drawn by a single horse', 'name': 'horse_buggy'}, {'frequency': 'c', 'synset': 'bull.n.11', 'synonyms': ['horned_cow'], 'id': 162, 'def': 'a cow with horns', 'name': 'bull'}, {'frequency': 'c', 'synset': 'bulldog.n.01', 
'synonyms': ['bulldog'], 'id': 163, 'def': 'a thickset short-haired dog with a large head and strong undershot lower jaw', 'name': 'bulldog'}, {'frequency': 'r', 'synset': 'bulldozer.n.01', 'synonyms': ['bulldozer', 'dozer'], 'id': 164, 'def': 'large powerful tractor; a large blade in front flattens areas of ground', 'name': 'bulldozer'}, {'frequency': 'c', 'synset': 'bullet_train.n.01', 'synonyms': ['bullet_train'], 'id': 165, 'def': 'a high-speed passenger train', 'name': 'bullet_train'}, {'frequency': 'c', 'synset': 'bulletin_board.n.02', 'synonyms': ['bulletin_board', 'notice_board'], 'id': 166, 'def': 'a board that hangs on a wall; displays announcements', 'name': 'bulletin_board'}, {'frequency': 'r', 'synset': 'bulletproof_vest.n.01', 'synonyms': ['bulletproof_vest'], 'id': 167, 'def': 'a vest capable of resisting the impact of a bullet', 'name': 'bulletproof_vest'}, {'frequency': 'c', 'synset': 'bullhorn.n.01', 'synonyms': ['bullhorn', 'megaphone'], 'id': 168, 'def': 'a portable loudspeaker with built-in microphone and amplifier', 'name': 'bullhorn'}, {'frequency': 'f', 'synset': 'bun.n.01', 'synonyms': ['bun', 'roll'], 'id': 169, 'def': 'small rounded bread either plain or sweet', 'name': 'bun'}, {'frequency': 'c', 'synset': 'bunk_bed.n.01', 'synonyms': ['bunk_bed'], 'id': 170, 'def': 'beds built one above the other', 'name': 'bunk_bed'}, {'frequency': 'f', 'synset': 'buoy.n.01', 'synonyms': ['buoy'], 'id': 171, 'def': 'a float attached by rope to the seabed to mark channels in a harbor or underwater hazards', 'name': 'buoy'}, {'frequency': 'r', 'synset': 'burrito.n.01', 'synonyms': ['burrito'], 'id': 172, 'def': 'a flour tortilla folded around a filling', 'name': 'burrito'}, {'frequency': 'f', 'synset': 'bus.n.01', 'synonyms': ['bus_(vehicle)', 'autobus', 'charabanc', 'double-decker', 'motorbus', 'motorcoach'], 'id': 173, 'def': 'a vehicle carrying many passengers; used for public transport', 'name': 'bus_(vehicle)'}, {'frequency': 'c', 'synset': 'business_card.n.01', 'synonyms': ['business_card'], 'id': 174, 'def': "a card on which are printed the person's name and business affiliation", 'name': 'business_card'}, {'frequency': 'f', 'synset': 'butter.n.01', 'synonyms': ['butter'], 'id': 175, 'def': 'an edible emulsion of fat globules made by churning milk or cream; for cooking and table use', 'name': 'butter'}, {'frequency': 'c', 'synset': 'butterfly.n.01', 'synonyms': ['butterfly'], 'id': 176, 'def': 'insect typically having a slender body with knobbed antennae and broad colorful wings', 'name': 'butterfly'}, {'frequency': 'f', 'synset': 'button.n.01', 'synonyms': ['button'], 'id': 177, 'def': 'a round fastener sewn to shirts and coats etc to fit through buttonholes', 'name': 'button'}, {'frequency': 'f', 'synset': 'cab.n.03', 'synonyms': ['cab_(taxi)', 'taxi', 'taxicab'], 'id': 178, 'def': 'a car that takes passengers where they want to go in exchange for money', 'name': 'cab_(taxi)'}, {'frequency': 'r', 'synset': 'cabana.n.01', 'synonyms': ['cabana'], 'id': 179, 'def': 'a small tent used as a dressing room beside the sea or a swimming pool', 'name': 'cabana'}, {'frequency': 'c', 'synset': 'cabin_car.n.01', 'synonyms': ['cabin_car', 'caboose'], 'id': 180, 'def': 'a car on a freight train for use of the train crew; usually the last car on the train', 'name': 'cabin_car'}, {'frequency': 'f', 'synset': 'cabinet.n.01', 'synonyms': ['cabinet'], 'id': 181, 'def': 'a piece of furniture resembling a cupboard with doors and shelves and drawers', 'name': 'cabinet'}, {'frequency': 'r', 
'synset': 'cabinet.n.03', 'synonyms': ['locker', 'storage_locker'], 'id': 182, 'def': 'a storage compartment for clothes and valuables; usually it has a lock', 'name': 'locker'}, {'frequency': 'f', 'synset': 'cake.n.03', 'synonyms': ['cake'], 'id': 183, 'def': 'baked goods made from or based on a mixture of flour, sugar, eggs, and fat', 'name': 'cake'}, {'frequency': 'c', 'synset': 'calculator.n.02', 'synonyms': ['calculator'], 'id': 184, 'def': 'a small machine that is used for mathematical calculations', 'name': 'calculator'}, {'frequency': 'f', 'synset': 'calendar.n.02', 'synonyms': ['calendar'], 'id': 185, 'def': 'a list or register of events (appointments/social events/court cases, etc)', 'name': 'calendar'}, {'frequency': 'c', 'synset': 'calf.n.01', 'synonyms': ['calf'], 'id': 186, 'def': 'young of domestic cattle', 'name': 'calf'}, {'frequency': 'c', 'synset': 'camcorder.n.01', 'synonyms': ['camcorder'], 'id': 187, 'def': 'a portable television camera and videocassette recorder', 'name': 'camcorder'}, {'frequency': 'c', 'synset': 'camel.n.01', 'synonyms': ['camel'], 'id': 188, 'def': 'cud-chewing mammal used as a draft or saddle animal in desert regions', 'name': 'camel'}, {'frequency': 'f', 'synset': 'camera.n.01', 'synonyms': ['camera'], 'id': 189, 'def': 'equipment for taking photographs', 'name': 'camera'}, {'frequency': 'c', 'synset': 'camera_lens.n.01', 'synonyms': ['camera_lens'], 'id': 190, 'def': 'a lens that focuses the image in a camera', 'name': 'camera_lens'}, {'frequency': 'c', 'synset': 'camper.n.02', 'synonyms': ['camper_(vehicle)', 'camping_bus', 'motor_home'], 'id': 191, 'def': 'a recreational vehicle equipped for camping out while traveling', 'name': 'camper_(vehicle)'}, {'frequency': 'f', 'synset': 'can.n.01', 'synonyms': ['can', 'tin_can'], 'id': 192, 'def': 'airtight sealed metal container for food or drink or paint etc.', 'name': 'can'}, {'frequency': 'c', 'synset': 'can_opener.n.01', 'synonyms': ['can_opener', 'tin_opener'], 'id': 193, 'def': 'a device for cutting cans open', 'name': 'can_opener'}, {'frequency': 'f', 'synset': 'candle.n.01', 'synonyms': ['candle', 'candlestick'], 'id': 194, 'def': 'stick of wax with a wick in the middle', 'name': 'candle'}, {'frequency': 'f', 'synset': 'candlestick.n.01', 'synonyms': ['candle_holder'], 'id': 195, 'def': 'a holder with sockets for candles', 'name': 'candle_holder'}, {'frequency': 'r', 'synset': 'candy_bar.n.01', 'synonyms': ['candy_bar'], 'id': 196, 'def': 'a candy shaped as a bar', 'name': 'candy_bar'}, {'frequency': 'c', 'synset': 'candy_cane.n.01', 'synonyms': ['candy_cane'], 'id': 197, 'def': 'a hard candy in the shape of a rod (usually with stripes)', 'name': 'candy_cane'}, {'frequency': 'c', 'synset': 'cane.n.01', 'synonyms': ['walking_cane'], 'id': 198, 'def': 'a stick that people can lean on to help them walk', 'name': 'walking_cane'}, {'frequency': 'c', 'synset': 'canister.n.02', 'synonyms': ['canister', 'cannister'], 'id': 199, 'def': 'metal container for storing dry foods such as tea or flour', 'name': 'canister'}, {'frequency': 'c', 'synset': 'canoe.n.01', 'synonyms': ['canoe'], 'id': 200, 'def': 'small and light boat; pointed at both ends; propelled with a paddle', 'name': 'canoe'}, {'frequency': 'c', 'synset': 'cantaloup.n.02', 'synonyms': ['cantaloup', 'cantaloupe'], 'id': 201, 'def': 'the fruit of a cantaloup vine; small to medium-sized melon with yellowish flesh', 'name': 'cantaloup'}, {'frequency': 'r', 'synset': 'canteen.n.01', 'synonyms': ['canteen'], 'id': 202, 'def': 'a flask for carrying 
water; used by soldiers or travelers', 'name': 'canteen'}, {'frequency': 'f', 'synset': 'cap.n.01', 'synonyms': ['cap_(headwear)'], 'id': 203, 'def': 'tight-fitting headwear', 'name': 'cap_(headwear)'}, {'frequency': 'f', 'synset': 'cap.n.02', 'synonyms': ['bottle_cap', 'cap_(container_lid)'], 'id': 204, 'def': 'a top (as for a bottle)', 'name': 'bottle_cap'}, {'frequency': 'c', 'synset': 'cape.n.02', 'synonyms': ['cape'], 'id': 205, 'def': 'a sleeveless garment like a cloak but shorter', 'name': 'cape'}, {'frequency': 'c', 'synset': 'cappuccino.n.01', 'synonyms': ['cappuccino', 'coffee_cappuccino'], 'id': 206, 'def': 'equal parts of espresso and steamed milk', 'name': 'cappuccino'}, {'frequency': 'f', 'synset': 'car.n.01', 'synonyms': ['car_(automobile)', 'auto_(automobile)', 'automobile'], 'id': 207, 'def': 'a motor vehicle with four wheels', 'name': 'car_(automobile)'}, {'frequency': 'f', 'synset': 'car.n.02', 'synonyms': ['railcar_(part_of_a_train)', 'railway_car_(part_of_a_train)', 'railroad_car_(part_of_a_train)'], 'id': 208, 'def': 'a wheeled vehicle adapted to the rails of railroad (mark each individual railcar separately)', 'name': 'railcar_(part_of_a_train)'}, {'frequency': 'r', 'synset': 'car.n.04', 'synonyms': ['elevator_car'], 'id': 209, 'def': 'where passengers ride up and down', 'name': 'elevator_car'}, {'frequency': 'r', 'synset': 'car_battery.n.01', 'synonyms': ['car_battery', 'automobile_battery'], 'id': 210, 'def': 'a battery in a motor vehicle', 'name': 'car_battery'}, {'frequency': 'c', 'synset': 'card.n.02', 'synonyms': ['identity_card'], 'id': 211, 'def': 'a card certifying the identity of the bearer', 'name': 'identity_card'}, {'frequency': 'c', 'synset': 'card.n.03', 'synonyms': ['card'], 'id': 212, 'def': 'a rectangular piece of paper used to send messages (e.g. 
greetings or pictures)', 'name': 'card'}, {'frequency': 'c', 'synset': 'cardigan.n.01', 'synonyms': ['cardigan'], 'id': 213, 'def': 'knitted jacket that is fastened up the front with buttons or a zipper', 'name': 'cardigan'}, {'frequency': 'r', 'synset': 'cargo_ship.n.01', 'synonyms': ['cargo_ship', 'cargo_vessel'], 'id': 214, 'def': 'a ship designed to carry cargo', 'name': 'cargo_ship'}, {'frequency': 'r', 'synset': 'carnation.n.01', 'synonyms': ['carnation'], 'id': 215, 'def': 'plant with pink to purple-red spice-scented usually double flowers', 'name': 'carnation'}, {'frequency': 'c', 'synset': 'carriage.n.02', 'synonyms': ['horse_carriage'], 'id': 216, 'def': 'a vehicle with wheels drawn by one or more horses', 'name': 'horse_carriage'}, {'frequency': 'f', 'synset': 'carrot.n.01', 'synonyms': ['carrot'], 'id': 217, 'def': 'deep orange edible root of the cultivated carrot plant', 'name': 'carrot'}, {'frequency': 'f', 'synset': 'carryall.n.01', 'synonyms': ['tote_bag'], 'id': 218, 'def': 'a capacious bag or basket', 'name': 'tote_bag'}, {'frequency': 'c', 'synset': 'cart.n.01', 'synonyms': ['cart'], 'id': 219, 'def': 'a heavy open wagon usually having two wheels and drawn by an animal', 'name': 'cart'}, {'frequency': 'c', 'synset': 'carton.n.02', 'synonyms': ['carton'], 'id': 220, 'def': 'a container made of cardboard for holding food or drink', 'name': 'carton'}, {'frequency': 'c', 'synset': 'cash_register.n.01', 'synonyms': ['cash_register', 'register_(for_cash_transactions)'], 'id': 221, 'def': 'a cashbox with an adding machine to register transactions', 'name': 'cash_register'}, {'frequency': 'r', 'synset': 'casserole.n.01', 'synonyms': ['casserole'], 'id': 222, 'def': 'food cooked and served in a casserole', 'name': 'casserole'}, {'frequency': 'r', 'synset': 'cassette.n.01', 'synonyms': ['cassette'], 'id': 223, 'def': 'a container that holds a magnetic tape used for recording or playing sound or video', 'name': 'cassette'}, {'frequency': 'c', 'synset': 'cast.n.05', 'synonyms': ['cast', 'plaster_cast', 'plaster_bandage'], 'id': 224, 'def': 'bandage consisting of a firm covering that immobilizes broken bones while they heal', 'name': 'cast'}, {'frequency': 'f', 'synset': 'cat.n.01', 'synonyms': ['cat'], 'id': 225, 'def': 'a domestic house cat', 'name': 'cat'}, {'frequency': 'f', 'synset': 'cauliflower.n.02', 'synonyms': ['cauliflower'], 'id': 226, 'def': 'edible compact head of white undeveloped flowers', 'name': 'cauliflower'}, {'frequency': 'c', 'synset': 'cayenne.n.02', 'synonyms': ['cayenne_(spice)', 'cayenne_pepper_(spice)', 'red_pepper_(spice)'], 'id': 227, 'def': 'ground pods and seeds of pungent red peppers of the genus Capsicum', 'name': 'cayenne_(spice)'}, {'frequency': 'c', 'synset': 'cd_player.n.01', 'synonyms': ['CD_player'], 'id': 228, 'def': 'electronic equipment for playing compact discs (CDs)', 'name': 'CD_player'}, {'frequency': 'f', 'synset': 'celery.n.01', 'synonyms': ['celery'], 'id': 229, 'def': 'widely cultivated herb with aromatic leaf stalks that are eaten raw or cooked', 'name': 'celery'}, {'frequency': 'f', 'synset': 'cellular_telephone.n.01', 'synonyms': ['cellular_telephone', 'cellular_phone', 'cellphone', 'mobile_phone', 'smart_phone'], 'id': 230, 'def': 'a hand-held mobile telephone', 'name': 'cellular_telephone'}, {'frequency': 'r', 'synset': 'chain_mail.n.01', 'synonyms': ['chain_mail', 'ring_mail', 'chain_armor', 'chain_armour', 'ring_armor', 'ring_armour'], 'id': 231, 'def': '(Middle Ages) flexible armor made of interlinked metal rings', 'name': 
'chain_mail'}, {'frequency': 'f', 'synset': 'chair.n.01', 'synonyms': ['chair'], 'id': 232, 'def': 'a seat for one person, with a support for the back', 'name': 'chair'}, {'frequency': 'r', 'synset': 'chaise_longue.n.01', 'synonyms': ['chaise_longue', 'chaise', 'daybed'], 'id': 233, 'def': 'a long chair; for reclining', 'name': 'chaise_longue'}, {'frequency': 'r', 'synset': 'chalice.n.01', 'synonyms': ['chalice'], 'id': 234, 'def': 'a bowl-shaped drinking vessel; especially the Eucharistic cup', 'name': 'chalice'}, {'frequency': 'f', 'synset': 'chandelier.n.01', 'synonyms': ['chandelier'], 'id': 235, 'def': 'branched lighting fixture; often ornate; hangs from the ceiling', 'name': 'chandelier'}, {'frequency': 'r', 'synset': 'chap.n.04', 'synonyms': ['chap'], 'id': 236, 'def': 'leather leggings without a seat; worn over trousers by cowboys to protect their legs', 'name': 'chap'}, {'frequency': 'r', 'synset': 'checkbook.n.01', 'synonyms': ['checkbook', 'chequebook'], 'id': 237, 'def': 'a book issued to holders of checking accounts', 'name': 'checkbook'}, {'frequency': 'r', 'synset': 'checkerboard.n.01', 'synonyms': ['checkerboard'], 'id': 238, 'def': 'a board having 64 squares of two alternating colors', 'name': 'checkerboard'}, {'frequency': 'c', 'synset': 'cherry.n.03', 'synonyms': ['cherry'], 'id': 239, 'def': 'a red fruit with a single hard stone', 'name': 'cherry'}, {'frequency': 'r', 'synset': 'chessboard.n.01', 'synonyms': ['chessboard'], 'id': 240, 'def': 'a checkerboard used to play chess', 'name': 'chessboard'}, {'frequency': 'c', 'synset': 'chicken.n.02', 'synonyms': ['chicken_(animal)'], 'id': 241, 'def': 'a domestic fowl bred for flesh or eggs', 'name': 'chicken_(animal)'}, {'frequency': 'c', 'synset': 'chickpea.n.01', 'synonyms': ['chickpea', 'garbanzo'], 'id': 242, 'def': 'the seed of the chickpea plant; usually dried', 'name': 'chickpea'}, {'frequency': 'c', 'synset': 'chili.n.02', 'synonyms': ['chili_(vegetable)', 'chili_pepper_(vegetable)', 'chilli_(vegetable)', 'chilly_(vegetable)', 'chile_(vegetable)'], 'id': 243, 'def': 'very hot and finely tapering pepper of special pungency', 'name': 'chili_(vegetable)'}, {'frequency': 'r', 'synset': 'chime.n.01', 'synonyms': ['chime', 'gong'], 'id': 244, 'def': 'an instrument consisting of a set of bells that are struck with a hammer', 'name': 'chime'}, {'frequency': 'r', 'synset': 'chinaware.n.01', 'synonyms': ['chinaware'], 'id': 245, 'def': 'dishware made of high quality porcelain', 'name': 'chinaware'}, {'frequency': 'c', 'synset': 'chip.n.04', 'synonyms': ['crisp_(potato_chip)', 'potato_chip'], 'id': 246, 'def': 'a thin crisp slice of potato fried in deep fat', 'name': 'crisp_(potato_chip)'}, {'frequency': 'r', 'synset': 'chip.n.06', 'synonyms': ['poker_chip'], 'id': 247, 'def': 'a small disk-shaped counter used to represent money when gambling', 'name': 'poker_chip'}, {'frequency': 'c', 'synset': 'chocolate_bar.n.01', 'synonyms': ['chocolate_bar'], 'id': 248, 'def': 'a bar of chocolate candy', 'name': 'chocolate_bar'}, {'frequency': 'c', 'synset': 'chocolate_cake.n.01', 'synonyms': ['chocolate_cake'], 'id': 249, 'def': 'cake containing chocolate', 'name': 'chocolate_cake'}, {'frequency': 'r', 'synset': 'chocolate_milk.n.01', 'synonyms': ['chocolate_milk'], 'id': 250, 'def': 'milk flavored with chocolate syrup', 'name': 'chocolate_milk'}, {'frequency': 'r', 'synset': 'chocolate_mousse.n.01', 'synonyms': ['chocolate_mousse'], 'id': 251, 'def': 'dessert mousse made with chocolate', 'name': 'chocolate_mousse'}, {'frequency': 'f', 
'synset': 'choker.n.03', 'synonyms': ['choker', 'collar', 'neckband'], 'id': 252, 'def': 'shirt collar, animal collar, or tight-fitting necklace', 'name': 'choker'}, {'frequency': 'f', 'synset': 'chopping_board.n.01', 'synonyms': ['chopping_board', 'cutting_board', 'chopping_block'], 'id': 253, 'def': 'a wooden board where meats or vegetables can be cut', 'name': 'chopping_board'}, {'frequency': 'f', 'synset': 'chopstick.n.01', 'synonyms': ['chopstick'], 'id': 254, 'def': 'one of a pair of slender sticks used as oriental tableware to eat food with', 'name': 'chopstick'}, {'frequency': 'f', 'synset': 'christmas_tree.n.05', 'synonyms': ['Christmas_tree'], 'id': 255, 'def': 'an ornamented evergreen used as a Christmas decoration', 'name': 'Christmas_tree'}, {'frequency': 'c', 'synset': 'chute.n.02', 'synonyms': ['slide'], 'id': 256, 'def': 'sloping channel through which things can descend', 'name': 'slide'}, {'frequency': 'r', 'synset': 'cider.n.01', 'synonyms': ['cider', 'cyder'], 'id': 257, 'def': 'a beverage made from juice pressed from apples', 'name': 'cider'}, {'frequency': 'r', 'synset': 'cigar_box.n.01', 'synonyms': ['cigar_box'], 'id': 258, 'def': 'a box for holding cigars', 'name': 'cigar_box'}, {'frequency': 'f', 'synset': 'cigarette.n.01', 'synonyms': ['cigarette'], 'id': 259, 'def': 'finely ground tobacco wrapped in paper; for smoking', 'name': 'cigarette'}, {'frequency': 'c', 'synset': 'cigarette_case.n.01', 'synonyms': ['cigarette_case', 'cigarette_pack'], 'id': 260, 'def': 'a small flat case for holding cigarettes', 'name': 'cigarette_case'}, {'frequency': 'f', 'synset': 'cistern.n.02', 'synonyms': ['cistern', 'water_tank'], 'id': 261, 'def': 'a tank that holds the water used to flush a toilet', 'name': 'cistern'}, {'frequency': 'r', 'synset': 'clarinet.n.01', 'synonyms': ['clarinet'], 'id': 262, 'def': 'a single-reed instrument with a straight tube', 'name': 'clarinet'}, {'frequency': 'c', 'synset': 'clasp.n.01', 'synonyms': ['clasp'], 'id': 263, 'def': 'a fastener (as a buckle or hook) that is used to hold two things together', 'name': 'clasp'}, {'frequency': 'c', 'synset': 'cleansing_agent.n.01', 'synonyms': ['cleansing_agent', 'cleanser', 'cleaner'], 'id': 264, 'def': 'a preparation used in cleaning something', 'name': 'cleansing_agent'}, {'frequency': 'r', 'synset': 'cleat.n.02', 'synonyms': ['cleat_(for_securing_rope)'], 'id': 265, 'def': 'a fastener (usually with two projecting horns) around which a rope can be secured', 'name': 'cleat_(for_securing_rope)'}, {'frequency': 'r', 'synset': 'clementine.n.01', 'synonyms': ['clementine'], 'id': 266, 'def': 'a variety of mandarin orange', 'name': 'clementine'}, {'frequency': 'c', 'synset': 'clip.n.03', 'synonyms': ['clip'], 'id': 267, 'def': 'any of various small fasteners used to hold loose articles together', 'name': 'clip'}, {'frequency': 'c', 'synset': 'clipboard.n.01', 'synonyms': ['clipboard'], 'id': 268, 'def': 'a small writing board with a clip at the top for holding papers', 'name': 'clipboard'}, {'frequency': 'r', 'synset': 'clipper.n.03', 'synonyms': ['clippers_(for_plants)'], 'id': 269, 'def': 'shears for cutting grass or shrubbery (often used in the plural)', 'name': 'clippers_(for_plants)'}, {'frequency': 'r', 'synset': 'cloak.n.02', 'synonyms': ['cloak'], 'id': 270, 'def': 'a loose outer garment', 'name': 'cloak'}, {'frequency': 'f', 'synset': 'clock.n.01', 'synonyms': ['clock', 'timepiece', 'timekeeper'], 'id': 271, 'def': 'a timepiece that shows the time of day', 'name': 'clock'}, {'frequency': 'f', 'synset': 
'clock_tower.n.01', 'synonyms': ['clock_tower'], 'id': 272, 'def': 'a tower with a large clock visible high up on an outside face', 'name': 'clock_tower'}, {'frequency': 'c', 'synset': 'clothes_hamper.n.01', 'synonyms': ['clothes_hamper', 'laundry_basket', 'clothes_basket'], 'id': 273, 'def': 'a hamper that holds dirty clothes to be washed or wet clothes to be dried', 'name': 'clothes_hamper'}, {'frequency': 'c', 'synset': 'clothespin.n.01', 'synonyms': ['clothespin', 'clothes_peg'], 'id': 274, 'def': 'wood or plastic fastener; for holding clothes on a clothesline', 'name': 'clothespin'}, {'frequency': 'r', 'synset': 'clutch_bag.n.01', 'synonyms': ['clutch_bag'], 'id': 275, 'def': "a woman's strapless purse that is carried in the hand", 'name': 'clutch_bag'}, {'frequency': 'f', 'synset': 'coaster.n.03', 'synonyms': ['coaster'], 'id': 276, 'def': 'a covering (plate or mat) that protects the surface of a table', 'name': 'coaster'}, {'frequency': 'f', 'synset': 'coat.n.01', 'synonyms': ['coat'], 'id': 277, 'def': 'an outer garment that has sleeves and covers the body from shoulder down', 'name': 'coat'}, {'frequency': 'c', 'synset': 'coat_hanger.n.01', 'synonyms': ['coat_hanger', 'clothes_hanger', 'dress_hanger'], 'id': 278, 'def': "a hanger that is shaped like a person's shoulders", 'name': 'coat_hanger'}, {'frequency': 'c', 'synset': 'coatrack.n.01', 'synonyms': ['coatrack', 'hatrack'], 'id': 279, 'def': 'a rack with hooks for temporarily holding coats and hats', 'name': 'coatrack'}, {'frequency': 'c', 'synset': 'cock.n.04', 'synonyms': ['cock', 'rooster'], 'id': 280, 'def': 'adult male chicken', 'name': 'cock'}, {'frequency': 'r', 'synset': 'cockroach.n.01', 'synonyms': ['cockroach'], 'id': 281, 'def': 'any of numerous chiefly nocturnal insects; some are domestic pests', 'name': 'cockroach'}, {'frequency': 'r', 'synset': 'cocoa.n.01', 'synonyms': ['cocoa_(beverage)', 'hot_chocolate_(beverage)', 'drinking_chocolate'], 'id': 282, 'def': 'a beverage made from cocoa powder and milk and sugar; usually drunk hot', 'name': 'cocoa_(beverage)'}, {'frequency': 'c', 'synset': 'coconut.n.02', 'synonyms': ['coconut', 'cocoanut'], 'id': 283, 'def': 'large hard-shelled brown oval nut with a fibrous husk', 'name': 'coconut'}, {'frequency': 'f', 'synset': 'coffee_maker.n.01', 'synonyms': ['coffee_maker', 'coffee_machine'], 'id': 284, 'def': 'a kitchen appliance for brewing coffee automatically', 'name': 'coffee_maker'}, {'frequency': 'f', 'synset': 'coffee_table.n.01', 'synonyms': ['coffee_table', 'cocktail_table'], 'id': 285, 'def': 'low table where magazines can be placed and coffee or cocktails are served', 'name': 'coffee_table'}, {'frequency': 'c', 'synset': 'coffeepot.n.01', 'synonyms': ['coffeepot'], 'id': 286, 'def': 'tall pot in which coffee is brewed', 'name': 'coffeepot'}, {'frequency': 'r', 'synset': 'coil.n.05', 'synonyms': ['coil'], 'id': 287, 'def': 'tubing that is wound in a spiral', 'name': 'coil'}, {'frequency': 'c', 'synset': 'coin.n.01', 'synonyms': ['coin'], 'id': 288, 'def': 'a flat metal piece (usually a disc) used as money', 'name': 'coin'}, {'frequency': 'c', 'synset': 'colander.n.01', 'synonyms': ['colander', 'cullender'], 'id': 289, 'def': 'bowl-shaped strainer; used to wash or drain foods', 'name': 'colander'}, {'frequency': 'c', 'synset': 'coleslaw.n.01', 'synonyms': ['coleslaw', 'slaw'], 'id': 290, 'def': 'basically shredded cabbage', 'name': 'coleslaw'}, {'frequency': 'r', 'synset': 'coloring_material.n.01', 'synonyms': ['coloring_material', 'colouring_material'], 'id': 291, 
'def': 'any material used for its color', 'name': 'coloring_material'}, {'frequency': 'r', 'synset': 'combination_lock.n.01', 'synonyms': ['combination_lock'], 'id': 292, 'def': 'lock that can be opened only by turning dials in a special sequence', 'name': 'combination_lock'}, {'frequency': 'c', 'synset': 'comforter.n.04', 'synonyms': ['pacifier', 'teething_ring'], 'id': 293, 'def': 'device used for an infant to suck or bite on', 'name': 'pacifier'}, {'frequency': 'r', 'synset': 'comic_book.n.01', 'synonyms': ['comic_book'], 'id': 294, 'def': 'a magazine devoted to comic strips', 'name': 'comic_book'}, {'frequency': 'r', 'synset': 'compass.n.01', 'synonyms': ['compass'], 'id': 295, 'def': 'navigational instrument for finding directions', 'name': 'compass'}, {'frequency': 'f', 'synset': 'computer_keyboard.n.01', 'synonyms': ['computer_keyboard', 'keyboard_(computer)'], 'id': 296, 'def': 'a keyboard that is a data input device for computers', 'name': 'computer_keyboard'}, {'frequency': 'f', 'synset': 'condiment.n.01', 'synonyms': ['condiment'], 'id': 297, 'def': 'a preparation (a sauce or relish or spice) to enhance flavor or enjoyment', 'name': 'condiment'}, {'frequency': 'f', 'synset': 'cone.n.01', 'synonyms': ['cone', 'traffic_cone'], 'id': 298, 'def': 'a cone-shaped object used to direct traffic', 'name': 'cone'}, {'frequency': 'f', 'synset': 'control.n.09', 'synonyms': ['control', 'controller'], 'id': 299, 'def': 'a mechanism that controls the operation of a machine', 'name': 'control'}, {'frequency': 'r', 'synset': 'convertible.n.01', 'synonyms': ['convertible_(automobile)'], 'id': 300, 'def': 'a car that has a top that can be folded or removed', 'name': 'convertible_(automobile)'}, {'frequency': 'r', 'synset': 'convertible.n.03', 'synonyms': ['sofa_bed'], 'id': 301, 'def': 'a sofa that can be converted into a bed', 'name': 'sofa_bed'}, {'frequency': 'r', 'synset': 'cooker.n.01', 'synonyms': ['cooker'], 'id': 302, 'def': 'a utensil for cooking', 'name': 'cooker'}, {'frequency': 'f', 'synset': 'cookie.n.01', 'synonyms': ['cookie', 'cooky', 'biscuit_(cookie)'], 'id': 303, 'def': "any of various small flat sweet cakes (`biscuit' is the British term)", 'name': 'cookie'}, {'frequency': 'r', 'synset': 'cooking_utensil.n.01', 'synonyms': ['cooking_utensil'], 'id': 304, 'def': 'a kitchen utensil made of material that does not melt easily; used for cooking', 'name': 'cooking_utensil'}, {'frequency': 'f', 'synset': 'cooler.n.01', 'synonyms': ['cooler_(for_food)', 'ice_chest'], 'id': 305, 'def': 'an insulated box for storing food often with ice', 'name': 'cooler_(for_food)'}, {'frequency': 'f', 'synset': 'cork.n.04', 'synonyms': ['cork_(bottle_plug)', 'bottle_cork'], 'id': 306, 'def': 'the plug in the mouth of a bottle (especially a wine bottle)', 'name': 'cork_(bottle_plug)'}, {'frequency': 'r', 'synset': 'corkboard.n.01', 'synonyms': ['corkboard'], 'id': 307, 'def': 'a sheet consisting of cork granules', 'name': 'corkboard'}, {'frequency': 'c', 'synset': 'corkscrew.n.01', 'synonyms': ['corkscrew', 'bottle_screw'], 'id': 308, 'def': 'a bottle opener that pulls corks', 'name': 'corkscrew'}, {'frequency': 'f', 'synset': 'corn.n.03', 'synonyms': ['edible_corn', 'corn', 'maize'], 'id': 309, 'def': 'ears or kernels of corn that can be prepared and served for human food (only mark individual ears or kernels)', 'name': 'edible_corn'}, {'frequency': 'r', 'synset': 'cornbread.n.01', 'synonyms': ['cornbread'], 'id': 310, 'def': 'bread made primarily of cornmeal', 'name': 'cornbread'}, {'frequency': 'c', 
'synset': 'cornet.n.01', 'synonyms': ['cornet', 'horn', 'trumpet'], 'id': 311, 'def': 'a brass musical instrument with a narrow tube and a flared bell and many valves', 'name': 'cornet'}, {'frequency': 'c', 'synset': 'cornice.n.01', 'synonyms': ['cornice', 'valance', 'valance_board', 'pelmet'], 'id': 312, 'def': 'a decorative framework to conceal curtain fixtures at the top of a window casing', 'name': 'cornice'}, {'frequency': 'r', 'synset': 'cornmeal.n.01', 'synonyms': ['cornmeal'], 'id': 313, 'def': 'coarsely ground corn', 'name': 'cornmeal'}, {'frequency': 'c', 'synset': 'corset.n.01', 'synonyms': ['corset', 'girdle'], 'id': 314, 'def': "a woman's close-fitting foundation garment", 'name': 'corset'}, {'frequency': 'c', 'synset': 'costume.n.04', 'synonyms': ['costume'], 'id': 315, 'def': 'the attire characteristic of a country or a time or a social class', 'name': 'costume'}, {'frequency': 'r', 'synset': 'cougar.n.01', 'synonyms': ['cougar', 'puma', 'catamount', 'mountain_lion', 'panther'], 'id': 316, 'def': 'large American feline resembling a lion', 'name': 'cougar'}, {'frequency': 'r', 'synset': 'coverall.n.01', 'synonyms': ['coverall'], 'id': 317, 'def': 'a loose-fitting protective garment that is worn over other clothing', 'name': 'coverall'}, {'frequency': 'c', 'synset': 'cowbell.n.01', 'synonyms': ['cowbell'], 'id': 318, 'def': 'a bell hung around the neck of a cow so that the cow can be easily located', 'name': 'cowbell'}, {'frequency': 'f', 'synset': 'cowboy_hat.n.01', 'synonyms': ['cowboy_hat', 'ten-gallon_hat'], 'id': 319, 'def': 'a hat with a wide brim and a soft crown; worn by American ranch hands', 'name': 'cowboy_hat'}, {'frequency': 'c', 'synset': 'crab.n.01', 'synonyms': ['crab_(animal)'], 'id': 320, 'def': 'decapod having eyes on short stalks and a broad flattened shell and pincers', 'name': 'crab_(animal)'}, {'frequency': 'r', 'synset': 'crab.n.05', 'synonyms': ['crabmeat'], 'id': 321, 'def': 'the edible flesh of any of various crabs', 'name': 'crabmeat'}, {'frequency': 'c', 'synset': 'cracker.n.01', 'synonyms': ['cracker'], 'id': 322, 'def': 'a thin crisp wafer', 'name': 'cracker'}, {'frequency': 'r', 'synset': 'crape.n.01', 'synonyms': ['crape', 'crepe', 'French_pancake'], 'id': 323, 'def': 'small very thin pancake', 'name': 'crape'}, {'frequency': 'f', 'synset': 'crate.n.01', 'synonyms': ['crate'], 'id': 324, 'def': 'a rugged box (usually made of wood); used for shipping', 'name': 'crate'}, {'frequency': 'c', 'synset': 'crayon.n.01', 'synonyms': ['crayon', 'wax_crayon'], 'id': 325, 'def': 'writing or drawing implement made of a colored stick of composition wax', 'name': 'crayon'}, {'frequency': 'r', 'synset': 'cream_pitcher.n.01', 'synonyms': ['cream_pitcher'], 'id': 326, 'def': 'a small pitcher for serving cream', 'name': 'cream_pitcher'}, {'frequency': 'c', 'synset': 'crescent_roll.n.01', 'synonyms': ['crescent_roll', 'croissant'], 'id': 327, 'def': 'very rich flaky crescent-shaped roll', 'name': 'crescent_roll'}, {'frequency': 'c', 'synset': 'crib.n.01', 'synonyms': ['crib', 'cot'], 'id': 328, 'def': 'baby bed with high sides made of slats', 'name': 'crib'}, {'frequency': 'c', 'synset': 'crock.n.03', 'synonyms': ['crock_pot', 'earthenware_jar'], 'id': 329, 'def': 'an earthen jar (made of baked clay) or a modern electric crockpot', 'name': 'crock_pot'}, {'frequency': 'f', 'synset': 'crossbar.n.01', 'synonyms': ['crossbar'], 'id': 330, 'def': 'a horizontal bar that goes across something', 'name': 'crossbar'}, {'frequency': 'r', 'synset': 'crouton.n.01', 'synonyms': 
['crouton'], 'id': 331, 'def': 'a small piece of toasted or fried bread; served in soup or salads', 'name': 'crouton'}, {'frequency': 'c', 'synset': 'crow.n.01', 'synonyms': ['crow'], 'id': 332, 'def': 'black birds having a raucous call', 'name': 'crow'}, {'frequency': 'r', 'synset': 'crowbar.n.01', 'synonyms': ['crowbar', 'wrecking_bar', 'pry_bar'], 'id': 333, 'def': 'a heavy iron lever with one end forged into a wedge', 'name': 'crowbar'}, {'frequency': 'c', 'synset': 'crown.n.04', 'synonyms': ['crown'], 'id': 334, 'def': 'an ornamental jeweled headdress signifying sovereignty', 'name': 'crown'}, {'frequency': 'c', 'synset': 'crucifix.n.01', 'synonyms': ['crucifix'], 'id': 335, 'def': 'representation of the cross on which Jesus died', 'name': 'crucifix'}, {'frequency': 'c', 'synset': 'cruise_ship.n.01', 'synonyms': ['cruise_ship', 'cruise_liner'], 'id': 336, 'def': 'a passenger ship used commercially for pleasure cruises', 'name': 'cruise_ship'}, {'frequency': 'c', 'synset': 'cruiser.n.01', 'synonyms': ['police_cruiser', 'patrol_car', 'police_car', 'squad_car'], 'id': 337, 'def': 'a car in which policemen cruise the streets', 'name': 'police_cruiser'}, {'frequency': 'f', 'synset': 'crumb.n.03', 'synonyms': ['crumb'], 'id': 338, 'def': 'small piece of e.g. bread or cake', 'name': 'crumb'}, {'frequency': 'c', 'synset': 'crutch.n.01', 'synonyms': ['crutch'], 'id': 339, 'def': 'a wooden or metal staff that fits under the armpit and reaches to the ground', 'name': 'crutch'}, {'frequency': 'c', 'synset': 'cub.n.03', 'synonyms': ['cub_(animal)'], 'id': 340, 'def': 'the young of certain carnivorous mammals such as the bear or wolf or lion', 'name': 'cub_(animal)'}, {'frequency': 'c', 'synset': 'cube.n.05', 'synonyms': ['cube', 'square_block'], 'id': 341, 'def': 'a block in the (approximate) shape of a cube', 'name': 'cube'}, {'frequency': 'f', 'synset': 'cucumber.n.02', 'synonyms': ['cucumber', 'cuke'], 'id': 342, 'def': 'cylindrical green fruit with thin green rind and white flesh eaten as a vegetable', 'name': 'cucumber'}, {'frequency': 'c', 'synset': 'cufflink.n.01', 'synonyms': ['cufflink'], 'id': 343, 'def': 'jewelry consisting of linked buttons used to fasten the cuffs of a shirt', 'name': 'cufflink'}, {'frequency': 'f', 'synset': 'cup.n.01', 'synonyms': ['cup'], 'id': 344, 'def': 'a small open container usually used for drinking; usually has a handle', 'name': 'cup'}, {'frequency': 'c', 'synset': 'cup.n.08', 'synonyms': ['trophy_cup'], 'id': 345, 'def': 'a metal award or cup-shaped vessel with handles that is awarded as a trophy to a competition winner', 'name': 'trophy_cup'}, {'frequency': 'f', 'synset': 'cupboard.n.01', 'synonyms': ['cupboard', 'closet'], 'id': 346, 'def': 'a small room (or recess) or cabinet used for storage space', 'name': 'cupboard'}, {'frequency': 'f', 'synset': 'cupcake.n.01', 'synonyms': ['cupcake'], 'id': 347, 'def': 'small cake baked in a muffin tin', 'name': 'cupcake'}, {'frequency': 'r', 'synset': 'curler.n.01', 'synonyms': ['hair_curler', 'hair_roller', 'hair_crimper'], 'id': 348, 'def': 'a cylindrical tube around which the hair is wound to curl it', 'name': 'hair_curler'}, {'frequency': 'r', 'synset': 'curling_iron.n.01', 'synonyms': ['curling_iron'], 'id': 349, 'def': 'a cylindrical home appliance that heats hair that has been curled around it', 'name': 'curling_iron'}, {'frequency': 'f', 'synset': 'curtain.n.01', 'synonyms': ['curtain', 'drapery'], 'id': 350, 'def': 'hanging cloth used as a blind (especially for a window)', 'name': 'curtain'}, 
{'frequency': 'f', 'synset': 'cushion.n.03', 'synonyms': ['cushion'], 'id': 351, 'def': 'a soft bag filled with air or padding such as feathers or foam rubber', 'name': 'cushion'}, {'frequency': 'r', 'synset': 'cylinder.n.04', 'synonyms': ['cylinder'], 'id': 352, 'def': 'a cylindrical container', 'name': 'cylinder'}, {'frequency': 'r', 'synset': 'cymbal.n.01', 'synonyms': ['cymbal'], 'id': 353, 'def': 'a percussion instrument consisting of a concave brass disk', 'name': 'cymbal'}, {'frequency': 'r', 'synset': 'dagger.n.01', 'synonyms': ['dagger'], 'id': 354, 'def': 'a short knife with a pointed blade used for piercing or stabbing', 'name': 'dagger'}, {'frequency': 'r', 'synset': 'dalmatian.n.02', 'synonyms': ['dalmatian'], 'id': 355, 'def': 'a large breed having a smooth white coat with black or brown spots', 'name': 'dalmatian'}, {'frequency': 'c', 'synset': 'dartboard.n.01', 'synonyms': ['dartboard'], 'id': 356, 'def': 'a circular board of wood or cork used as the target in the game of darts', 'name': 'dartboard'}, {'frequency': 'r', 'synset': 'date.n.08', 'synonyms': ['date_(fruit)'], 'id': 357, 'def': 'sweet edible fruit of the date palm with a single long woody seed', 'name': 'date_(fruit)'}, {'frequency': 'f', 'synset': 'deck_chair.n.01', 'synonyms': ['deck_chair', 'beach_chair'], 'id': 358, 'def': 'a folding chair for use outdoors; a wooden frame supports a length of canvas', 'name': 'deck_chair'}, {'frequency': 'c', 'synset': 'deer.n.01', 'synonyms': ['deer', 'cervid'], 'id': 359, 'def': "distinguished from Bovidae by the male's having solid deciduous antlers", 'name': 'deer'}, {'frequency': 'c', 'synset': 'dental_floss.n.01', 'synonyms': ['dental_floss', 'floss'], 'id': 360, 'def': 'a soft thread for cleaning the spaces between the teeth', 'name': 'dental_floss'}, {'frequency': 'f', 'synset': 'desk.n.01', 'synonyms': ['desk'], 'id': 361, 'def': 'a piece of furniture with a writing surface and usually drawers or other compartments', 'name': 'desk'}, {'frequency': 'r', 'synset': 'detergent.n.01', 'synonyms': ['detergent'], 'id': 362, 'def': 'a surface-active chemical widely used in industry and laundering', 'name': 'detergent'}, {'frequency': 'c', 'synset': 'diaper.n.01', 'synonyms': ['diaper'], 'id': 363, 'def': 'garment consisting of a folded cloth drawn up between the legs and fastened at the waist', 'name': 'diaper'}, {'frequency': 'r', 'synset': 'diary.n.01', 'synonyms': ['diary', 'journal'], 'id': 364, 'def': 'yearly planner book', 'name': 'diary'}, {'frequency': 'r', 'synset': 'die.n.01', 'synonyms': ['die', 'dice'], 'id': 365, 'def': 'a small cube with 1 to 6 spots on the six faces; used in gambling', 'name': 'die'}, {'frequency': 'r', 'synset': 'dinghy.n.01', 'synonyms': ['dinghy', 'dory', 'rowboat'], 'id': 366, 'def': 'a small boat of shallow draft with seats and oars with which it is propelled', 'name': 'dinghy'}, {'frequency': 'f', 'synset': 'dining_table.n.01', 'synonyms': ['dining_table'], 'id': 367, 'def': 'a table at which meals are served', 'name': 'dining_table'}, {'frequency': 'r', 'synset': 'dinner_jacket.n.01', 'synonyms': ['tux', 'tuxedo'], 'id': 368, 'def': 'semiformal evening dress for men', 'name': 'tux'}, {'frequency': 'f', 'synset': 'dish.n.01', 'synonyms': ['dish'], 'id': 369, 'def': 'a piece of dishware normally used as a container for holding or serving food', 'name': 'dish'}, {'frequency': 'c', 'synset': 'dish.n.05', 'synonyms': ['dish_antenna'], 'id': 370, 'def': 'directional antenna consisting of a parabolic reflector', 'name': 'dish_antenna'}, 
{'frequency': 'c', 'synset': 'dishrag.n.01', 'synonyms': ['dishrag', 'dishcloth'], 'id': 371, 'def': 'a cloth for washing dishes or cleaning in general', 'name': 'dishrag'}, {'frequency': 'f', 'synset': 'dishtowel.n.01', 'synonyms': ['dishtowel', 'tea_towel'], 'id': 372, 'def': 'a towel for drying dishes', 'name': 'dishtowel'}, {'frequency': 'f', 'synset': 'dishwasher.n.01', 'synonyms': ['dishwasher', 'dishwashing_machine'], 'id': 373, 'def': 'a machine for washing dishes', 'name': 'dishwasher'}, {'frequency': 'r', 'synset': 'dishwasher_detergent.n.01', 'synonyms': ['dishwasher_detergent', 'dishwashing_detergent', 'dishwashing_liquid', 'dishsoap'], 'id': 374, 'def': 'dishsoap or dish detergent designed for use in dishwashers', 'name': 'dishwasher_detergent'}, {'frequency': 'f', 'synset': 'dispenser.n.01', 'synonyms': ['dispenser'], 'id': 375, 'def': 'a container so designed that the contents can be used in prescribed amounts', 'name': 'dispenser'}, {'frequency': 'r', 'synset': 'diving_board.n.01', 'synonyms': ['diving_board'], 'id': 376, 'def': 'a springboard from which swimmers can dive', 'name': 'diving_board'}, {'frequency': 'f', 'synset': 'dixie_cup.n.01', 'synonyms': ['Dixie_cup', 'paper_cup'], 'id': 377, 'def': 'a disposable cup made of paper; for holding drinks', 'name': 'Dixie_cup'}, {'frequency': 'f', 'synset': 'dog.n.01', 'synonyms': ['dog'], 'id': 378, 'def': 'a common domesticated dog', 'name': 'dog'}, {'frequency': 'f', 'synset': 'dog_collar.n.01', 'synonyms': ['dog_collar'], 'id': 379, 'def': 'a collar for a dog', 'name': 'dog_collar'}, {'frequency': 'f', 'synset': 'doll.n.01', 'synonyms': ['doll'], 'id': 380, 'def': 'a toy replica of a HUMAN (NOT AN ANIMAL)', 'name': 'doll'}, {'frequency': 'r', 'synset': 'dollar.n.02', 'synonyms': ['dollar', 'dollar_bill', 'one_dollar_bill'], 'id': 381, 'def': 'a piece of paper money worth one dollar', 'name': 'dollar'}, {'frequency': 'r', 'synset': 'dollhouse.n.01', 'synonyms': ['dollhouse', "doll's_house"], 'id': 382, 'def': "a house so small that it is likened to a child's plaything", 'name': 'dollhouse'}, {'frequency': 'c', 'synset': 'dolphin.n.02', 'synonyms': ['dolphin'], 'id': 383, 'def': 'any of various small toothed whales with a beaklike snout; larger than porpoises', 'name': 'dolphin'}, {'frequency': 'c', 'synset': 'domestic_ass.n.01', 'synonyms': ['domestic_ass', 'donkey'], 'id': 384, 'def': 'domestic beast of burden descended from the African wild ass; patient but stubborn', 'name': 'domestic_ass'}, {'frequency': 'f', 'synset': 'doorknob.n.01', 'synonyms': ['doorknob', 'doorhandle'], 'id': 385, 'def': "a knob used to open a door (often called `doorhandle' in Great Britain)", 'name': 'doorknob'}, {'frequency': 'c', 'synset': 'doormat.n.02', 'synonyms': ['doormat', 'welcome_mat'], 'id': 386, 'def': 'a mat placed outside an exterior door for wiping the shoes before entering', 'name': 'doormat'}, {'frequency': 'f', 'synset': 'doughnut.n.02', 'synonyms': ['doughnut', 'donut'], 'id': 387, 'def': 'a small ring-shaped friedcake', 'name': 'doughnut'}, {'frequency': 'r', 'synset': 'dove.n.01', 'synonyms': ['dove'], 'id': 388, 'def': 'any of numerous small pigeons', 'name': 'dove'}, {'frequency': 'r', 'synset': 'dragonfly.n.01', 'synonyms': ['dragonfly'], 'id': 389, 'def': 'slender-bodied non-stinging insect having iridescent wings that are outspread at rest', 'name': 'dragonfly'}, {'frequency': 'f', 'synset': 'drawer.n.01', 'synonyms': ['drawer'], 'id': 390, 'def': 'a boxlike container in a piece of furniture; made so as to slide in and 
out', 'name': 'drawer'}, {'frequency': 'c', 'synset': 'drawers.n.01', 'synonyms': ['underdrawers', 'boxers', 'boxershorts'], 'id': 391, 'def': 'underpants worn by men', 'name': 'underdrawers'}, {'frequency': 'f', 'synset': 'dress.n.01', 'synonyms': ['dress', 'frock'], 'id': 392, 'def': 'a one-piece garment for a woman; has skirt and bodice', 'name': 'dress'}, {'frequency': 'c', 'synset': 'dress_hat.n.01', 'synonyms': ['dress_hat', 'high_hat', 'opera_hat', 'silk_hat', 'top_hat'], 'id': 393, 'def': "a man's hat with a tall crown; usually covered with silk or with beaver fur", 'name': 'dress_hat'}, {'frequency': 'f', 'synset': 'dress_suit.n.01', 'synonyms': ['dress_suit'], 'id': 394, 'def': 'formalwear consisting of full evening dress for men', 'name': 'dress_suit'}, {'frequency': 'f', 'synset': 'dresser.n.05', 'synonyms': ['dresser'], 'id': 395, 'def': 'a cabinet with shelves', 'name': 'dresser'}, {'frequency': 'c', 'synset': 'drill.n.01', 'synonyms': ['drill'], 'id': 396, 'def': 'a tool with a sharp rotating point for making holes in hard materials', 'name': 'drill'}, {'frequency': 'r', 'synset': 'drone.n.04', 'synonyms': ['drone'], 'id': 397, 'def': 'an aircraft without a pilot that is operated by remote control', 'name': 'drone'}, {'frequency': 'r', 'synset': 'dropper.n.01', 'synonyms': ['dropper', 'eye_dropper'], 'id': 398, 'def': 'pipet consisting of a small tube with a vacuum bulb at one end for drawing liquid in and releasing it a drop at a time', 'name': 'dropper'}, {'frequency': 'c', 'synset': 'drum.n.01', 'synonyms': ['drum_(musical_instrument)'], 'id': 399, 'def': 'a musical percussion instrument; usually consists of a hollow cylinder with a membrane stretched across each end', 'name': 'drum_(musical_instrument)'}, {'frequency': 'r', 'synset': 'drumstick.n.02', 'synonyms': ['drumstick'], 'id': 400, 'def': 'a stick used for playing a drum', 'name': 'drumstick'}, {'frequency': 'f', 'synset': 'duck.n.01', 'synonyms': ['duck'], 'id': 401, 'def': 'small web-footed broad-billed swimming bird', 'name': 'duck'}, {'frequency': 'c', 'synset': 'duckling.n.02', 'synonyms': ['duckling'], 'id': 402, 'def': 'young duck', 'name': 'duckling'}, {'frequency': 'c', 'synset': 'duct_tape.n.01', 'synonyms': ['duct_tape'], 'id': 403, 'def': 'a wide silvery adhesive tape', 'name': 'duct_tape'}, {'frequency': 'f', 'synset': 'duffel_bag.n.01', 'synonyms': ['duffel_bag', 'duffle_bag', 'duffel', 'duffle'], 'id': 404, 'def': 'a large cylindrical bag of heavy cloth (does not include suitcases)', 'name': 'duffel_bag'}, {'frequency': 'r', 'synset': 'dumbbell.n.01', 'synonyms': ['dumbbell'], 'id': 405, 'def': 'an exercising weight with two ball-like ends connected by a short handle', 'name': 'dumbbell'}, {'frequency': 'c', 'synset': 'dumpster.n.01', 'synonyms': ['dumpster'], 'id': 406, 'def': 'a container designed to receive and transport and dump waste', 'name': 'dumpster'}, {'frequency': 'r', 'synset': 'dustpan.n.02', 'synonyms': ['dustpan'], 'id': 407, 'def': 'a short-handled receptacle into which dust can be swept', 'name': 'dustpan'}, {'frequency': 'c', 'synset': 'eagle.n.01', 'synonyms': ['eagle'], 'id': 408, 'def': 'large birds of prey noted for their broad wings and strong soaring flight', 'name': 'eagle'}, {'frequency': 'f', 'synset': 'earphone.n.01', 'synonyms': ['earphone', 'earpiece', 'headphone'], 'id': 409, 'def': 'device for listening to audio that is held over or inserted into the ear', 'name': 'earphone'}, {'frequency': 'r', 'synset': 'earplug.n.01', 'synonyms': ['earplug'], 'id': 410, 'def': 'a 
soft plug that is inserted into the ear canal to block sound', 'name': 'earplug'}, {'frequency': 'f', 'synset': 'earring.n.01', 'synonyms': ['earring'], 'id': 411, 'def': 'jewelry to ornament the ear', 'name': 'earring'}, {'frequency': 'c', 'synset': 'easel.n.01', 'synonyms': ['easel'], 'id': 412, 'def': "an upright tripod for displaying something (usually an artist's canvas)", 'name': 'easel'}, {'frequency': 'r', 'synset': 'eclair.n.01', 'synonyms': ['eclair'], 'id': 413, 'def': 'oblong cream puff', 'name': 'eclair'}, {'frequency': 'r', 'synset': 'eel.n.01', 'synonyms': ['eel'], 'id': 414, 'def': 'an elongate fish with fatty flesh', 'name': 'eel'}, {'frequency': 'f', 'synset': 'egg.n.02', 'synonyms': ['egg', 'eggs'], 'id': 415, 'def': 'oval reproductive body of a fowl (especially a hen) used as food', 'name': 'egg'}, {'frequency': 'r', 'synset': 'egg_roll.n.01', 'synonyms': ['egg_roll', 'spring_roll'], 'id': 416, 'def': 'minced vegetables and meat wrapped in a pancake and fried', 'name': 'egg_roll'}, {'frequency': 'c', 'synset': 'egg_yolk.n.01', 'synonyms': ['egg_yolk', 'yolk_(egg)'], 'id': 417, 'def': 'the yellow spherical part of an egg', 'name': 'egg_yolk'}, {'frequency': 'c', 'synset': 'eggbeater.n.02', 'synonyms': ['eggbeater', 'eggwhisk'], 'id': 418, 'def': 'a mixer for beating eggs or whipping cream', 'name': 'eggbeater'}, {'frequency': 'c', 'synset': 'eggplant.n.01', 'synonyms': ['eggplant', 'aubergine'], 'id': 419, 'def': 'egg-shaped vegetable having a shiny skin typically dark purple', 'name': 'eggplant'}, {'frequency': 'r', 'synset': 'electric_chair.n.01', 'synonyms': ['electric_chair'], 'id': 420, 'def': 'a chair-shaped instrument of execution by electrocution', 'name': 'electric_chair'}, {'frequency': 'f', 'synset': 'electric_refrigerator.n.01', 'synonyms': ['refrigerator'], 'id': 421, 'def': 'a refrigerator in which the coolant is pumped around by an electric motor', 'name': 'refrigerator'}, {'frequency': 'f', 'synset': 'elephant.n.01', 'synonyms': ['elephant'], 'id': 422, 'def': 'a common elephant', 'name': 'elephant'}, {'frequency': 'c', 'synset': 'elk.n.01', 'synonyms': ['elk', 'moose'], 'id': 423, 'def': 'large northern deer with enormous flattened antlers in the male', 'name': 'elk'}, {'frequency': 'c', 'synset': 'envelope.n.01', 'synonyms': ['envelope'], 'id': 424, 'def': 'a flat (usually rectangular) container for a letter, thin package, etc.', 'name': 'envelope'}, {'frequency': 'c', 'synset': 'eraser.n.01', 'synonyms': ['eraser'], 'id': 425, 'def': 'an implement used to erase something', 'name': 'eraser'}, {'frequency': 'r', 'synset': 'escargot.n.01', 'synonyms': ['escargot'], 'id': 426, 'def': 'edible snail usually served in the shell with a sauce of melted butter and garlic', 'name': 'escargot'}, {'frequency': 'r', 'synset': 'eyepatch.n.01', 'synonyms': ['eyepatch'], 'id': 427, 'def': 'a protective cloth covering for an injured eye', 'name': 'eyepatch'}, {'frequency': 'r', 'synset': 'falcon.n.01', 'synonyms': ['falcon'], 'id': 428, 'def': 'birds of prey having long pointed powerful wings adapted for swift flight', 'name': 'falcon'}, {'frequency': 'f', 'synset': 'fan.n.01', 'synonyms': ['fan'], 'id': 429, 'def': 'a device for creating a current of air by movement of a surface or surfaces', 'name': 'fan'}, {'frequency': 'f', 'synset': 'faucet.n.01', 'synonyms': ['faucet', 'spigot', 'tap'], 'id': 430, 'def': 'a regulator for controlling the flow of a liquid from a reservoir', 'name': 'faucet'}, {'frequency': 'r', 'synset': 'fedora.n.01', 'synonyms': ['fedora'], 'id': 
431, 'def': 'a hat made of felt with a creased crown', 'name': 'fedora'}, {'frequency': 'r', 'synset': 'ferret.n.02', 'synonyms': ['ferret'], 'id': 432, 'def': 'domesticated albino variety of the European polecat bred for hunting rats and rabbits', 'name': 'ferret'}, {'frequency': 'c', 'synset': 'ferris_wheel.n.01', 'synonyms': ['Ferris_wheel'], 'id': 433, 'def': 'a large wheel with suspended seats that remain upright as the wheel rotates', 'name': 'Ferris_wheel'}, {'frequency': 'c', 'synset': 'ferry.n.01', 'synonyms': ['ferry', 'ferryboat'], 'id': 434, 'def': 'a boat that transports people or vehicles across a body of water and operates on a regular schedule', 'name': 'ferry'}, {'frequency': 'r', 'synset': 'fig.n.04', 'synonyms': ['fig_(fruit)'], 'id': 435, 'def': 'fleshy sweet pear-shaped yellowish or purple fruit eaten fresh or preserved or dried', 'name': 'fig_(fruit)'}, {'frequency': 'c', 'synset': 'fighter.n.02', 'synonyms': ['fighter_jet', 'fighter_aircraft', 'attack_aircraft'], 'id': 436, 'def': 'a high-speed military or naval airplane designed to destroy enemy targets', 'name': 'fighter_jet'}, {'frequency': 'f', 'synset': 'figurine.n.01', 'synonyms': ['figurine'], 'id': 437, 'def': 'a small carved or molded figure', 'name': 'figurine'}, {'frequency': 'c', 'synset': 'file.n.03', 'synonyms': ['file_cabinet', 'filing_cabinet'], 'id': 438, 'def': 'office furniture consisting of a container for keeping papers in order', 'name': 'file_cabinet'}, {'frequency': 'r', 'synset': 'file.n.04', 'synonyms': ['file_(tool)'], 'id': 439, 'def': 'a steel hand tool with small sharp teeth on some or all of its surfaces; used for smoothing wood or metal', 'name': 'file_(tool)'}, {'frequency': 'f', 'synset': 'fire_alarm.n.02', 'synonyms': ['fire_alarm', 'smoke_alarm'], 'id': 440, 'def': 'an alarm that is tripped off by fire or smoke', 'name': 'fire_alarm'}, {'frequency': 'f', 'synset': 'fire_engine.n.01', 'synonyms': ['fire_engine', 'fire_truck'], 'id': 441, 'def': 'large trucks that carry firefighters and equipment to the site of a fire', 'name': 'fire_engine'}, {'frequency': 'f', 'synset': 'fire_extinguisher.n.01', 'synonyms': ['fire_extinguisher', 'extinguisher'], 'id': 442, 'def': 'a manually operated device for extinguishing small fires', 'name': 'fire_extinguisher'}, {'frequency': 'c', 'synset': 'fire_hose.n.01', 'synonyms': ['fire_hose'], 'id': 443, 'def': 'a large hose that carries water from a fire hydrant to the site of the fire', 'name': 'fire_hose'}, {'frequency': 'f', 'synset': 'fireplace.n.01', 'synonyms': ['fireplace'], 'id': 444, 'def': 'an open recess in a wall at the base of a chimney where a fire can be built', 'name': 'fireplace'}, {'frequency': 'f', 'synset': 'fireplug.n.01', 'synonyms': ['fireplug', 'fire_hydrant', 'hydrant'], 'id': 445, 'def': 'an upright hydrant for drawing water to use in fighting a fire', 'name': 'fireplug'}, {'frequency': 'r', 'synset': 'first-aid_kit.n.01', 'synonyms': ['first-aid_kit'], 'id': 446, 'def': 'kit consisting of a set of bandages and medicines for giving first aid', 'name': 'first-aid_kit'}, {'frequency': 'f', 'synset': 'fish.n.01', 'synonyms': ['fish'], 'id': 447, 'def': 'any of various mostly cold-blooded aquatic vertebrates usually having scales and breathing through gills', 'name': 'fish'}, {'frequency': 'c', 'synset': 'fish.n.02', 'synonyms': ['fish_(food)'], 'id': 448, 'def': 'the flesh of fish used as food', 'name': 'fish_(food)'}, {'frequency': 'r', 'synset': 'fishbowl.n.02', 'synonyms': ['fishbowl', 'goldfish_bowl'], 'id': 449, 'def': 'a 
transparent bowl in which small fish are kept', 'name': 'fishbowl'}, {'frequency': 'c', 'synset': 'fishing_rod.n.01', 'synonyms': ['fishing_rod', 'fishing_pole'], 'id': 450, 'def': 'a rod that is used in fishing to extend the fishing line', 'name': 'fishing_rod'}, {'frequency': 'f', 'synset': 'flag.n.01', 'synonyms': ['flag'], 'id': 451, 'def': 'emblem usually consisting of a rectangular piece of cloth of distinctive design (do not include pole)', 'name': 'flag'}, {'frequency': 'f', 'synset': 'flagpole.n.02', 'synonyms': ['flagpole', 'flagstaff'], 'id': 452, 'def': 'a tall staff or pole on which a flag is raised', 'name': 'flagpole'}, {'frequency': 'c', 'synset': 'flamingo.n.01', 'synonyms': ['flamingo'], 'id': 453, 'def': 'large pink web-footed bird with down-bent bill', 'name': 'flamingo'}, {'frequency': 'c', 'synset': 'flannel.n.01', 'synonyms': ['flannel'], 'id': 454, 'def': 'a soft light woolen fabric; used for clothing', 'name': 'flannel'}, {'frequency': 'c', 'synset': 'flap.n.01', 'synonyms': ['flap'], 'id': 455, 'def': 'any broad thin covering attached at one edge, such as a mud flap next to a wheel or a flap on an airplane wing', 'name': 'flap'}, {'frequency': 'r', 'synset': 'flash.n.10', 'synonyms': ['flash', 'flashbulb'], 'id': 456, 'def': 'a lamp for providing momentary light to take a photograph', 'name': 'flash'}, {'frequency': 'c', 'synset': 'flashlight.n.01', 'synonyms': ['flashlight', 'torch'], 'id': 457, 'def': 'a small portable battery-powered electric lamp', 'name': 'flashlight'}, {'frequency': 'r', 'synset': 'fleece.n.03', 'synonyms': ['fleece'], 'id': 458, 'def': 'a soft bulky fabric with deep pile; used chiefly for clothing', 'name': 'fleece'}, {'frequency': 'f', 'synset': 'flip-flop.n.02', 'synonyms': ['flip-flop_(sandal)'], 'id': 459, 'def': 'a backless sandal held to the foot by a thong between two toes', 'name': 'flip-flop_(sandal)'}, {'frequency': 'c', 'synset': 'flipper.n.01', 'synonyms': ['flipper_(footwear)', 'fin_(footwear)'], 'id': 460, 'def': 'a shoe to aid a person in swimming', 'name': 'flipper_(footwear)'}, {'frequency': 'f', 'synset': 'flower_arrangement.n.01', 'synonyms': ['flower_arrangement', 'floral_arrangement'], 'id': 461, 'def': 'a decorative arrangement of flowers', 'name': 'flower_arrangement'}, {'frequency': 'c', 'synset': 'flute.n.02', 'synonyms': ['flute_glass', 'champagne_flute'], 'id': 462, 'def': 'a tall narrow wineglass', 'name': 'flute_glass'}, {'frequency': 'c', 'synset': 'foal.n.01', 'synonyms': ['foal'], 'id': 463, 'def': 'a young horse', 'name': 'foal'}, {'frequency': 'c', 'synset': 'folding_chair.n.01', 'synonyms': ['folding_chair'], 'id': 464, 'def': 'a chair that can be folded flat for storage', 'name': 'folding_chair'}, {'frequency': 'c', 'synset': 'food_processor.n.01', 'synonyms': ['food_processor'], 'id': 465, 'def': 'a kitchen appliance for shredding, blending, chopping, or slicing food', 'name': 'food_processor'}, {'frequency': 'c', 'synset': 'football.n.02', 'synonyms': ['football_(American)'], 'id': 466, 'def': 'the inflated oblong ball used in playing American football', 'name': 'football_(American)'}, {'frequency': 'r', 'synset': 'football_helmet.n.01', 'synonyms': ['football_helmet'], 'id': 467, 'def': 'a padded helmet with a face mask to protect the head of football players', 'name': 'football_helmet'}, {'frequency': 'c', 'synset': 'footstool.n.01', 'synonyms': ['footstool', 'footrest'], 'id': 468, 'def': 'a low seat or a stool to rest the feet of a seated person', 'name': 'footstool'}, {'frequency': 'f', 'synset': 
'fork.n.01', 'synonyms': ['fork'], 'id': 469, 'def': 'cutlery used for serving and eating food', 'name': 'fork'}, {'frequency': 'c', 'synset': 'forklift.n.01', 'synonyms': ['forklift'], 'id': 470, 'def': 'an industrial vehicle with a power operated fork in front that can be inserted under loads to lift and move them', 'name': 'forklift'}, {'frequency': 'c', 'synset': 'freight_car.n.01', 'synonyms': ['freight_car'], 'id': 471, 'def': 'a railway car that carries freight', 'name': 'freight_car'}, {'frequency': 'c', 'synset': 'french_toast.n.01', 'synonyms': ['French_toast'], 'id': 472, 'def': 'bread slice dipped in egg and milk and fried', 'name': 'French_toast'}, {'frequency': 'c', 'synset': 'freshener.n.01', 'synonyms': ['freshener', 'air_freshener'], 'id': 473, 'def': 'anything that freshens air by removing or covering odor', 'name': 'freshener'}, {'frequency': 'f', 'synset': 'frisbee.n.01', 'synonyms': ['frisbee'], 'id': 474, 'def': 'a light, plastic disk propelled with a flip of the wrist for recreation or competition', 'name': 'frisbee'}, {'frequency': 'c', 'synset': 'frog.n.01', 'synonyms': ['frog', 'toad', 'toad_frog'], 'id': 475, 'def': 'a tailless stout-bodied amphibian with long hind limbs for leaping', 'name': 'frog'}, {'frequency': 'c', 'synset': 'fruit_juice.n.01', 'synonyms': ['fruit_juice'], 'id': 476, 'def': 'drink produced by squeezing or crushing fruit', 'name': 'fruit_juice'}, {'frequency': 'f', 'synset': 'frying_pan.n.01', 'synonyms': ['frying_pan', 'frypan', 'skillet'], 'id': 477, 'def': 'a pan used for frying foods', 'name': 'frying_pan'}, {'frequency': 'r', 'synset': 'fudge.n.01', 'synonyms': ['fudge'], 'id': 478, 'def': 'soft creamy candy', 'name': 'fudge'}, {'frequency': 'r', 'synset': 'funnel.n.02', 'synonyms': ['funnel'], 'id': 479, 'def': 'a cone-shaped utensil used to channel a substance into a container with a small mouth', 'name': 'funnel'}, {'frequency': 'r', 'synset': 'futon.n.01', 'synonyms': ['futon'], 'id': 480, 'def': 'a pad that is used for sleeping on the floor or on a raised frame', 'name': 'futon'}, {'frequency': 'r', 'synset': 'gag.n.02', 'synonyms': ['gag', 'muzzle'], 'id': 481, 'def': "restraint put into a person's mouth to prevent speaking or shouting", 'name': 'gag'}, {'frequency': 'r', 'synset': 'garbage.n.03', 'synonyms': ['garbage'], 'id': 482, 'def': 'a receptacle where waste can be discarded', 'name': 'garbage'}, {'frequency': 'c', 'synset': 'garbage_truck.n.01', 'synonyms': ['garbage_truck'], 'id': 483, 'def': 'a truck for collecting domestic refuse', 'name': 'garbage_truck'}, {'frequency': 'c', 'synset': 'garden_hose.n.01', 'synonyms': ['garden_hose'], 'id': 484, 'def': 'a hose used for watering a lawn or garden', 'name': 'garden_hose'}, {'frequency': 'c', 'synset': 'gargle.n.01', 'synonyms': ['gargle', 'mouthwash'], 'id': 485, 'def': 'a medicated solution used for gargling and rinsing the mouth', 'name': 'gargle'}, {'frequency': 'r', 'synset': 'gargoyle.n.02', 'synonyms': ['gargoyle'], 'id': 486, 'def': 'an ornament consisting of a grotesquely carved figure of a person or animal', 'name': 'gargoyle'}, {'frequency': 'c', 'synset': 'garlic.n.02', 'synonyms': ['garlic', 'ail'], 'id': 487, 'def': 'aromatic bulb used as seasoning', 'name': 'garlic'}, {'frequency': 'r', 'synset': 'gasmask.n.01', 'synonyms': ['gasmask', 'respirator', 'gas_helmet'], 'id': 488, 'def': 'a protective face mask with a filter', 'name': 'gasmask'}, {'frequency': 'c', 'synset': 'gazelle.n.01', 'synonyms': ['gazelle'], 'id': 489, 'def': 'small swift graceful antelope of
Africa and Asia having lustrous eyes', 'name': 'gazelle'}, {'frequency': 'c', 'synset': 'gelatin.n.02', 'synonyms': ['gelatin', 'jelly'], 'id': 490, 'def': 'an edible jelly made with gelatin and used as a dessert or salad base or a coating for foods', 'name': 'gelatin'}, {'frequency': 'r', 'synset': 'gem.n.02', 'synonyms': ['gemstone'], 'id': 491, 'def': 'a crystalline rock that can be cut and polished for jewelry', 'name': 'gemstone'}, {'frequency': 'r', 'synset': 'generator.n.02', 'synonyms': ['generator'], 'id': 492, 'def': 'engine that converts mechanical energy into electrical energy by electromagnetic induction', 'name': 'generator'}, {'frequency': 'c', 'synset': 'giant_panda.n.01', 'synonyms': ['giant_panda', 'panda', 'panda_bear'], 'id': 493, 'def': 'large black-and-white herbivorous mammal of bamboo forests of China and Tibet', 'name': 'giant_panda'}, {'frequency': 'c', 'synset': 'gift_wrap.n.01', 'synonyms': ['gift_wrap'], 'id': 494, 'def': 'attractive wrapping paper suitable for wrapping gifts', 'name': 'gift_wrap'}, {'frequency': 'c', 'synset': 'ginger.n.03', 'synonyms': ['ginger', 'gingerroot'], 'id': 495, 'def': 'the root of the common ginger plant; used fresh as a seasoning', 'name': 'ginger'}, {'frequency': 'f', 'synset': 'giraffe.n.01', 'synonyms': ['giraffe'], 'id': 496, 'def': 'tall animal having a spotted coat and small horns and very long neck and legs', 'name': 'giraffe'}, {'frequency': 'c', 'synset': 'girdle.n.02', 'synonyms': ['cincture', 'sash', 'waistband', 'waistcloth'], 'id': 497, 'def': 'a band of material around the waist that strengthens a skirt or trousers', 'name': 'cincture'}, {'frequency': 'f', 'synset': 'glass.n.02', 'synonyms': ['glass_(drink_container)', 'drinking_glass'], 'id': 498, 'def': 'a container for holding liquids while drinking', 'name': 'glass_(drink_container)'}, {'frequency': 'c', 'synset': 'globe.n.03', 'synonyms': ['globe'], 'id': 499, 'def': 'a sphere on which a map (especially of the earth) is represented', 'name': 'globe'}, {'frequency': 'f', 'synset': 'glove.n.02', 'synonyms': ['glove'], 'id': 500, 'def': 'handwear covering the hand', 'name': 'glove'}, {'frequency': 'c', 'synset': 'goat.n.01', 'synonyms': ['goat'], 'id': 501, 'def': 'a common goat', 'name': 'goat'}, {'frequency': 'f', 'synset': 'goggles.n.01', 'synonyms': ['goggles'], 'id': 502, 'def': 'tight-fitting spectacles worn to protect the eyes', 'name': 'goggles'}, {'frequency': 'r', 'synset': 'goldfish.n.01', 'synonyms': ['goldfish'], 'id': 503, 'def': 'small golden or orange-red freshwater fishes used as pond or aquarium pets', 'name': 'goldfish'}, {'frequency': 'c', 'synset': 'golf_club.n.02', 'synonyms': ['golf_club', 'golf-club'], 'id': 504, 'def': 'golf equipment used by a golfer to hit a golf ball', 'name': 'golf_club'}, {'frequency': 'c', 'synset': 'golfcart.n.01', 'synonyms': ['golfcart'], 'id': 505, 'def': 'a small motor vehicle in which golfers can ride between shots', 'name': 'golfcart'}, {'frequency': 'r', 'synset': 'gondola.n.02', 'synonyms': ['gondola_(boat)'], 'id': 506, 'def': 'long narrow flat-bottomed boat propelled by sculling; traditionally used on canals of Venice', 'name': 'gondola_(boat)'}, {'frequency': 'c', 'synset': 'goose.n.01', 'synonyms': ['goose'], 'id': 507, 'def': 'loud, web-footed long-necked aquatic birds usually larger than ducks', 'name': 'goose'}, {'frequency': 'r', 'synset': 'gorilla.n.01', 'synonyms': ['gorilla'], 'id': 508, 'def': 'largest ape', 'name': 'gorilla'}, {'frequency': 'r', 'synset': 'gourd.n.02', 'synonyms': ['gourd'], 
'id': 509, 'def': 'any of numerous inedible fruits with hard rinds', 'name': 'gourd'}, {'frequency': 'f', 'synset': 'grape.n.01', 'synonyms': ['grape'], 'id': 510, 'def': 'any of various juicy fruit with green or purple skins; grow in clusters', 'name': 'grape'}, {'frequency': 'c', 'synset': 'grater.n.01', 'synonyms': ['grater'], 'id': 511, 'def': 'utensil with sharp perforations for shredding foods (as vegetables or cheese)', 'name': 'grater'}, {'frequency': 'c', 'synset': 'gravestone.n.01', 'synonyms': ['gravestone', 'headstone', 'tombstone'], 'id': 512, 'def': 'a stone that is used to mark a grave', 'name': 'gravestone'}, {'frequency': 'r', 'synset': 'gravy_boat.n.01', 'synonyms': ['gravy_boat', 'gravy_holder'], 'id': 513, 'def': 'a dish (often boat-shaped) for serving gravy or sauce', 'name': 'gravy_boat'}, {'frequency': 'f', 'synset': 'green_bean.n.02', 'synonyms': ['green_bean'], 'id': 514, 'def': 'a common bean plant cultivated for its slender green edible pods', 'name': 'green_bean'}, {'frequency': 'f', 'synset': 'green_onion.n.01', 'synonyms': ['green_onion', 'spring_onion', 'scallion'], 'id': 515, 'def': 'a young onion before the bulb has enlarged', 'name': 'green_onion'}, {'frequency': 'r', 'synset': 'griddle.n.01', 'synonyms': ['griddle'], 'id': 516, 'def': 'cooking utensil consisting of a flat heated surface on which food is cooked', 'name': 'griddle'}, {'frequency': 'f', 'synset': 'grill.n.02', 'synonyms': ['grill', 'grille', 'grillwork', 'radiator_grille'], 'id': 517, 'def': 'a framework of metal bars used as a partition or a grate', 'name': 'grill'}, {'frequency': 'r', 'synset': 'grits.n.01', 'synonyms': ['grits', 'hominy_grits'], 'id': 518, 'def': 'coarsely ground corn boiled as a breakfast dish', 'name': 'grits'}, {'frequency': 'c', 'synset': 'grizzly.n.01', 'synonyms': ['grizzly', 'grizzly_bear'], 'id': 519, 'def': 'powerful brownish-yellow bear of the uplands of western North America', 'name': 'grizzly'}, {'frequency': 'c', 'synset': 'grocery_bag.n.01', 'synonyms': ['grocery_bag'], 'id': 520, 'def': "a sack for holding customer's groceries", 'name': 'grocery_bag'}, {'frequency': 'f', 'synset': 'guitar.n.01', 'synonyms': ['guitar'], 'id': 521, 'def': 'a stringed instrument usually having six strings; played by strumming or plucking', 'name': 'guitar'}, {'frequency': 'c', 'synset': 'gull.n.02', 'synonyms': ['gull', 'seagull'], 'id': 522, 'def': 'mostly white aquatic bird having long pointed wings and short legs', 'name': 'gull'}, {'frequency': 'c', 'synset': 'gun.n.01', 'synonyms': ['gun'], 'id': 523, 'def': 'a weapon that discharges a bullet at high velocity from a metal tube', 'name': 'gun'}, {'frequency': 'f', 'synset': 'hairbrush.n.01', 'synonyms': ['hairbrush'], 'id': 524, 'def': "a brush used to groom a person's hair", 'name': 'hairbrush'}, {'frequency': 'c', 'synset': 'hairnet.n.01', 'synonyms': ['hairnet'], 'id': 525, 'def': 'a small net that someone wears over their hair to keep it in place', 'name': 'hairnet'}, {'frequency': 'c', 'synset': 'hairpin.n.01', 'synonyms': ['hairpin'], 'id': 526, 'def': "a double pronged pin used to hold women's hair in place", 'name': 'hairpin'}, {'frequency': 'r', 'synset': 'halter.n.03', 'synonyms': ['halter_top'], 'id': 527, 'def': "a woman's top that fastens behind the back and neck leaving the back and arms uncovered", 'name': 'halter_top'}, {'frequency': 'f', 'synset': 'ham.n.01', 'synonyms': ['ham', 'jambon', 'gammon'], 'id': 528, 'def': 'meat cut from the thigh of a hog (usually smoked)', 'name': 'ham'}, {'frequency': 'c', 
'synset': 'hamburger.n.01', 'synonyms': ['hamburger', 'beefburger', 'burger'], 'id': 529, 'def': 'a sandwich consisting of a patty of minced beef served on a bun', 'name': 'hamburger'}, {'frequency': 'c', 'synset': 'hammer.n.02', 'synonyms': ['hammer'], 'id': 530, 'def': 'a hand tool with a heavy head and a handle; used to deliver an impulsive force by striking', 'name': 'hammer'}, {'frequency': 'c', 'synset': 'hammock.n.02', 'synonyms': ['hammock'], 'id': 531, 'def': 'a hanging bed of canvas or rope netting (usually suspended between two trees)', 'name': 'hammock'}, {'frequency': 'r', 'synset': 'hamper.n.02', 'synonyms': ['hamper'], 'id': 532, 'def': 'a basket usually with a cover', 'name': 'hamper'}, {'frequency': 'c', 'synset': 'hamster.n.01', 'synonyms': ['hamster'], 'id': 533, 'def': 'short-tailed burrowing rodent with large cheek pouches', 'name': 'hamster'}, {'frequency': 'f', 'synset': 'hand_blower.n.01', 'synonyms': ['hair_dryer'], 'id': 534, 'def': 'a hand-held electric blower that can blow warm air onto the hair', 'name': 'hair_dryer'}, {'frequency': 'r', 'synset': 'hand_glass.n.01', 'synonyms': ['hand_glass', 'hand_mirror'], 'id': 535, 'def': 'a mirror intended to be held in the hand', 'name': 'hand_glass'}, {'frequency': 'f', 'synset': 'hand_towel.n.01', 'synonyms': ['hand_towel', 'face_towel'], 'id': 536, 'def': 'a small towel used to dry the hands or face', 'name': 'hand_towel'}, {'frequency': 'c', 'synset': 'handcart.n.01', 'synonyms': ['handcart', 'pushcart', 'hand_truck'], 'id': 537, 'def': 'wheeled vehicle that can be pushed by a person', 'name': 'handcart'}, {'frequency': 'r', 'synset': 'handcuff.n.01', 'synonyms': ['handcuff'], 'id': 538, 'def': 'shackle that consists of a metal loop that can be locked around the wrist', 'name': 'handcuff'}, {'frequency': 'c', 'synset': 'handkerchief.n.01', 'synonyms': ['handkerchief'], 'id': 539, 'def': 'a square piece of cloth used for wiping the eyes or nose or as a costume accessory', 'name': 'handkerchief'}, {'frequency': 'f', 'synset': 'handle.n.01', 'synonyms': ['handle', 'grip', 'handgrip'], 'id': 540, 'def': 'the appendage to an object that is designed to be held in order to use or move it', 'name': 'handle'}, {'frequency': 'r', 'synset': 'handsaw.n.01', 'synonyms': ['handsaw', "carpenter's_saw"], 'id': 541, 'def': 'a saw used with one hand for cutting wood', 'name': 'handsaw'}, {'frequency': 'r', 'synset': 'hardback.n.01', 'synonyms': ['hardback_book', 'hardcover_book'], 'id': 542, 'def': 'a book with cardboard or cloth or leather covers', 'name': 'hardback_book'}, {'frequency': 'r', 'synset': 'harmonium.n.01', 'synonyms': ['harmonium', 'organ_(musical_instrument)', 'reed_organ_(musical_instrument)'], 'id': 543, 'def': 'a free-reed instrument in which air is forced through the reeds by bellows', 'name': 'harmonium'}, {'frequency': 'f', 'synset': 'hat.n.01', 'synonyms': ['hat'], 'id': 544, 'def': 'headwear that protects the head from bad weather or sun, or is worn for fashion', 'name': 'hat'}, {'frequency': 'r', 'synset': 'hatbox.n.01', 'synonyms': ['hatbox'], 'id': 545, 'def': 'a round piece of luggage for carrying hats', 'name': 'hatbox'}, {'frequency': 'c', 'synset': 'head_covering.n.01', 'synonyms': ['veil'], 'id': 546, 'def': 'a garment that covers the head OR face', 'name': 'veil'}, {'frequency': 'f', 'synset': 'headband.n.01', 'synonyms': ['headband'], 'id': 547, 'def': 'a band worn around or over the head', 'name': 'headband'}, {'frequency': 'f', 'synset': 'headboard.n.01', 'synonyms': ['headboard'], 'id': 548, 'def': 'a
vertical board or panel forming the head of a bedstead', 'name': 'headboard'}, {'frequency': 'f', 'synset': 'headlight.n.01', 'synonyms': ['headlight', 'headlamp'], 'id': 549, 'def': 'a powerful light with reflector; attached to the front of an automobile or locomotive', 'name': 'headlight'}, {'frequency': 'c', 'synset': 'headscarf.n.01', 'synonyms': ['headscarf'], 'id': 550, 'def': 'a kerchief worn over the head and tied under the chin', 'name': 'headscarf'}, {'frequency': 'r', 'synset': 'headset.n.01', 'synonyms': ['headset'], 'id': 551, 'def': 'receiver consisting of a pair of headphones', 'name': 'headset'}, {'frequency': 'c', 'synset': 'headstall.n.01', 'synonyms': ['headstall_(for_horses)', 'headpiece_(for_horses)'], 'id': 552, 'def': "the band that is the part of a bridle that fits around a horse's head", 'name': 'headstall_(for_horses)'}, {'frequency': 'c', 'synset': 'heart.n.02', 'synonyms': ['heart'], 'id': 553, 'def': 'a muscular organ; its contractions move the blood through the body', 'name': 'heart'}, {'frequency': 'c', 'synset': 'heater.n.01', 'synonyms': ['heater', 'warmer'], 'id': 554, 'def': 'device that heats water or supplies warmth to a room', 'name': 'heater'}, {'frequency': 'c', 'synset': 'helicopter.n.01', 'synonyms': ['helicopter'], 'id': 555, 'def': 'an aircraft without wings that obtains its lift from the rotation of overhead blades', 'name': 'helicopter'}, {'frequency': 'f', 'synset': 'helmet.n.02', 'synonyms': ['helmet'], 'id': 556, 'def': 'a protective headgear made of hard material to resist blows', 'name': 'helmet'}, {'frequency': 'r', 'synset': 'heron.n.02', 'synonyms': ['heron'], 'id': 557, 'def': 'grey or white wading bird with long neck and long legs and (usually) long bill', 'name': 'heron'}, {'frequency': 'c', 'synset': 'highchair.n.01', 'synonyms': ['highchair', 'feeding_chair'], 'id': 558, 'def': 'a chair for feeding a very young child', 'name': 'highchair'}, {'frequency': 'f', 'synset': 'hinge.n.01', 'synonyms': ['hinge'], 'id': 559, 'def': 'a joint that holds two parts together so that one can swing relative to the other', 'name': 'hinge'}, {'frequency': 'r', 'synset': 'hippopotamus.n.01', 'synonyms': ['hippopotamus'], 'id': 560, 'def': 'massive thick-skinned animal living in or around rivers of tropical Africa', 'name': 'hippopotamus'}, {'frequency': 'r', 'synset': 'hockey_stick.n.01', 'synonyms': ['hockey_stick'], 'id': 561, 'def': 'sports implement consisting of a stick used by hockey players to move the puck', 'name': 'hockey_stick'}, {'frequency': 'c', 'synset': 'hog.n.03', 'synonyms': ['hog', 'pig'], 'id': 562, 'def': 'domestic swine', 'name': 'hog'}, {'frequency': 'f', 'synset': 'home_plate.n.01', 'synonyms': ['home_plate_(baseball)', 'home_base_(baseball)'], 'id': 563, 'def': '(baseball) a rubber slab where the batter stands; it must be touched by a base runner in order to score', 'name': 'home_plate_(baseball)'}, {'frequency': 'c', 'synset': 'honey.n.01', 'synonyms': ['honey'], 'id': 564, 'def': 'a sweet yellow liquid produced by bees', 'name': 'honey'}, {'frequency': 'f', 'synset': 'hood.n.06', 'synonyms': ['fume_hood', 'exhaust_hood'], 'id': 565, 'def': 'metal covering leading to a vent that exhausts smoke or fumes', 'name': 'fume_hood'}, {'frequency': 'f', 'synset': 'hook.n.05', 'synonyms': ['hook'], 'id': 566, 'def': 'a curved or bent implement for suspending or pulling something', 'name': 'hook'}, {'frequency': 'r', 'synset': 'hookah.n.01', 'synonyms': ['hookah', 'narghile', 'nargileh', 'sheesha', 'shisha', 'water_pipe'], 'id': 567, 
'def': 'a tobacco pipe with a long flexible tube connected to a container where the smoke is cooled by passing through water', 'name': 'hookah'}, {'frequency': 'r', 'synset': 'hornet.n.01', 'synonyms': ['hornet'], 'id': 568, 'def': 'large stinging wasp', 'name': 'hornet'}, {'frequency': 'f', 'synset': 'horse.n.01', 'synonyms': ['horse'], 'id': 569, 'def': 'a common horse', 'name': 'horse'}, {'frequency': 'f', 'synset': 'hose.n.03', 'synonyms': ['hose', 'hosepipe'], 'id': 570, 'def': 'a flexible pipe for conveying a liquid or gas', 'name': 'hose'}, {'frequency': 'r', 'synset': 'hot-air_balloon.n.01', 'synonyms': ['hot-air_balloon'], 'id': 571, 'def': 'balloon for travel through the air in a basket suspended below a large bag of heated air', 'name': 'hot-air_balloon'}, {'frequency': 'r', 'synset': 'hot_plate.n.01', 'synonyms': ['hotplate'], 'id': 572, 'def': 'a portable electric appliance for heating or cooking or keeping food warm', 'name': 'hotplate'}, {'frequency': 'c', 'synset': 'hot_sauce.n.01', 'synonyms': ['hot_sauce'], 'id': 573, 'def': 'a pungent peppery sauce', 'name': 'hot_sauce'}, {'frequency': 'r', 'synset': 'hourglass.n.01', 'synonyms': ['hourglass'], 'id': 574, 'def': 'a sandglass timer that runs for sixty minutes', 'name': 'hourglass'}, {'frequency': 'r', 'synset': 'houseboat.n.01', 'synonyms': ['houseboat'], 'id': 575, 'def': 'a barge that is designed and equipped for use as a dwelling', 'name': 'houseboat'}, {'frequency': 'c', 'synset': 'hummingbird.n.01', 'synonyms': ['hummingbird'], 'id': 576, 'def': 'tiny American bird having brilliant iridescent plumage and long slender bills', 'name': 'hummingbird'}, {'frequency': 'r', 'synset': 'hummus.n.01', 'synonyms': ['hummus', 'humus', 'hommos', 'hoummos', 'humous'], 'id': 577, 'def': 'a thick spread made from mashed chickpeas', 'name': 'hummus'}, {'frequency': 'f', 'synset': 'ice_bear.n.01', 'synonyms': ['polar_bear'], 'id': 578, 'def': 'white bear of Arctic regions', 'name': 'polar_bear'}, {'frequency': 'c', 'synset': 'ice_cream.n.01', 'synonyms': ['icecream'], 'id': 579, 'def': 'frozen dessert containing cream and sugar and flavoring', 'name': 'icecream'}, {'frequency': 'r', 'synset': 'ice_lolly.n.01', 'synonyms': ['popsicle'], 'id': 580, 'def': 'ice cream or water ice on a small wooden stick', 'name': 'popsicle'}, {'frequency': 'c', 'synset': 'ice_maker.n.01', 'synonyms': ['ice_maker'], 'id': 581, 'def': 'an appliance included in some electric refrigerators for making ice cubes', 'name': 'ice_maker'}, {'frequency': 'r', 'synset': 'ice_pack.n.01', 'synonyms': ['ice_pack', 'ice_bag'], 'id': 582, 'def': 'a waterproof bag filled with ice: applied to the body (especially the head) to cool or reduce swelling', 'name': 'ice_pack'}, {'frequency': 'r', 'synset': 'ice_skate.n.01', 'synonyms': ['ice_skate'], 'id': 583, 'def': 'skate consisting of a boot with a steel blade fitted to the sole', 'name': 'ice_skate'}, {'frequency': 'c', 'synset': 'igniter.n.01', 'synonyms': ['igniter', 'ignitor', 'lighter'], 'id': 584, 'def': 'a substance or device used to start a fire', 'name': 'igniter'}, {'frequency': 'r', 'synset': 'inhaler.n.01', 'synonyms': ['inhaler', 'inhalator'], 'id': 585, 'def': 'a dispenser that produces a chemical vapor to be inhaled through mouth or nose', 'name': 'inhaler'}, {'frequency': 'f', 'synset': 'ipod.n.01', 'synonyms': ['iPod'], 'id': 586, 'def': 'a pocket-sized device used to play music files', 'name': 'iPod'}, {'frequency': 'c', 'synset': 'iron.n.04', 'synonyms': ['iron_(for_clothing)', 
'smoothing_iron_(for_clothing)'], 'id': 587, 'def': 'home appliance consisting of a flat metal base that is heated and used to smooth cloth', 'name': 'iron_(for_clothing)'}, {'frequency': 'c', 'synset': 'ironing_board.n.01', 'synonyms': ['ironing_board'], 'id': 588, 'def': 'narrow padded board on collapsible supports; used for ironing clothes', 'name': 'ironing_board'}, {'frequency': 'f', 'synset': 'jacket.n.01', 'synonyms': ['jacket'], 'id': 589, 'def': 'a waist-length coat', 'name': 'jacket'}, {'frequency': 'c', 'synset': 'jam.n.01', 'synonyms': ['jam'], 'id': 590, 'def': 'preserve of crushed fruit', 'name': 'jam'}, {'frequency': 'f', 'synset': 'jar.n.01', 'synonyms': ['jar'], 'id': 591, 'def': 'a vessel (usually cylindrical) with a wide mouth and without handles', 'name': 'jar'}, {'frequency': 'f', 'synset': 'jean.n.01', 'synonyms': ['jean', 'blue_jean', 'denim'], 'id': 592, 'def': '(usually plural) close-fitting trousers of heavy denim for manual work or casual wear', 'name': 'jean'}, {'frequency': 'c', 'synset': 'jeep.n.01', 'synonyms': ['jeep', 'landrover'], 'id': 593, 'def': 'a car suitable for traveling over rough terrain', 'name': 'jeep'}, {'frequency': 'r', 'synset': 'jelly_bean.n.01', 'synonyms': ['jelly_bean', 'jelly_egg'], 'id': 594, 'def': 'sugar-glazed jellied candy', 'name': 'jelly_bean'}, {'frequency': 'f', 'synset': 'jersey.n.03', 'synonyms': ['jersey', 'T-shirt', 'tee_shirt'], 'id': 595, 'def': 'a close-fitting pullover shirt', 'name': 'jersey'}, {'frequency': 'c', 'synset': 'jet.n.01', 'synonyms': ['jet_plane', 'jet-propelled_plane'], 'id': 596, 'def': 'an airplane powered by one or more jet engines', 'name': 'jet_plane'}, {'frequency': 'r', 'synset': 'jewel.n.01', 'synonyms': ['jewel', 'gem', 'precious_stone'], 'id': 597, 'def': 'a precious or semiprecious stone incorporated into a piece of jewelry', 'name': 'jewel'}, {'frequency': 'c', 'synset': 'jewelry.n.01', 'synonyms': ['jewelry', 'jewellery'], 'id': 598, 'def': 'an adornment (as a bracelet or ring or necklace) made of precious metals and set with gems (or imitation gems)', 'name': 'jewelry'}, {'frequency': 'r', 'synset': 'joystick.n.02', 'synonyms': ['joystick'], 'id': 599, 'def': 'a control device for computers consisting of a vertical handle that can move freely in two directions', 'name': 'joystick'}, {'frequency': 'c', 'synset': 'jump_suit.n.01', 'synonyms': ['jumpsuit'], 'id': 600, 'def': "one-piece garment fashioned after a parachutist's uniform", 'name': 'jumpsuit'}, {'frequency': 'c', 'synset': 'kayak.n.01', 'synonyms': ['kayak'], 'id': 601, 'def': 'a small canoe consisting of a light frame made watertight with animal skins', 'name': 'kayak'}, {'frequency': 'r', 'synset': 'keg.n.02', 'synonyms': ['keg'], 'id': 602, 'def': 'small cask or barrel', 'name': 'keg'}, {'frequency': 'r', 'synset': 'kennel.n.01', 'synonyms': ['kennel', 'doghouse'], 'id': 603, 'def': 'outbuilding that serves as a shelter for a dog', 'name': 'kennel'}, {'frequency': 'c', 'synset': 'kettle.n.01', 'synonyms': ['kettle', 'boiler'], 'id': 604, 'def': 'a metal pot for stewing or boiling; usually has a lid', 'name': 'kettle'}, {'frequency': 'f', 'synset': 'key.n.01', 'synonyms': ['key'], 'id': 605, 'def': 'metal instrument used to unlock a lock', 'name': 'key'}, {'frequency': 'r', 'synset': 'keycard.n.01', 'synonyms': ['keycard'], 'id': 606, 'def': 'a plastic card used to gain access typically to a door', 'name': 'keycard'}, {'frequency': 'c', 'synset': 'kilt.n.01', 'synonyms': ['kilt'], 'id': 607, 'def': 'a knee-length pleated tartan 
skirt worn by men as part of the traditional dress in the Highlands of northern Scotland', 'name': 'kilt'}, {'frequency': 'c', 'synset': 'kimono.n.01', 'synonyms': ['kimono'], 'id': 608, 'def': 'a loose robe; imitated from robes originally worn by Japanese', 'name': 'kimono'}, {'frequency': 'f', 'synset': 'kitchen_sink.n.01', 'synonyms': ['kitchen_sink'], 'id': 609, 'def': 'a sink in a kitchen', 'name': 'kitchen_sink'}, {'frequency': 'r', 'synset': 'kitchen_table.n.01', 'synonyms': ['kitchen_table'], 'id': 610, 'def': 'a table in the kitchen', 'name': 'kitchen_table'}, {'frequency': 'f', 'synset': 'kite.n.03', 'synonyms': ['kite'], 'id': 611, 'def': 'plaything consisting of a light frame covered with tissue paper; flown in wind at end of a string', 'name': 'kite'}, {'frequency': 'c', 'synset': 'kitten.n.01', 'synonyms': ['kitten', 'kitty'], 'id': 612, 'def': 'young domestic cat', 'name': 'kitten'}, {'frequency': 'c', 'synset': 'kiwi.n.03', 'synonyms': ['kiwi_fruit'], 'id': 613, 'def': 'fuzzy brown egg-shaped fruit with slightly tart green flesh', 'name': 'kiwi_fruit'}, {'frequency': 'f', 'synset': 'knee_pad.n.01', 'synonyms': ['knee_pad'], 'id': 614, 'def': 'protective garment consisting of a pad worn by football or baseball or hockey players', 'name': 'knee_pad'}, {'frequency': 'f', 'synset': 'knife.n.01', 'synonyms': ['knife'], 'id': 615, 'def': 'tool with a blade and point used as a cutting instrument', 'name': 'knife'}, {'frequency': 'r', 'synset': 'knitting_needle.n.01', 'synonyms': ['knitting_needle'], 'id': 616, 'def': 'needle consisting of a slender rod with pointed ends; usually used in pairs', 'name': 'knitting_needle'}, {'frequency': 'f', 'synset': 'knob.n.02', 'synonyms': ['knob'], 'id': 617, 'def': 'a round handle often found on a door', 'name': 'knob'}, {'frequency': 'r', 'synset': 'knocker.n.05', 'synonyms': ['knocker_(on_a_door)', 'doorknocker'], 'id': 618, 'def': 'a device (usually metal and ornamental) attached by a hinge to a door', 'name': 'knocker_(on_a_door)'}, {'frequency': 'r', 'synset': 'koala.n.01', 'synonyms': ['koala', 'koala_bear'], 'id': 619, 'def': 'sluggish tailless Australian marsupial with grey furry ears and coat', 'name': 'koala'}, {'frequency': 'r', 'synset': 'lab_coat.n.01', 'synonyms': ['lab_coat', 'laboratory_coat'], 'id': 620, 'def': 'a light coat worn to protect clothing from substances used while working in a laboratory', 'name': 'lab_coat'}, {'frequency': 'f', 'synset': 'ladder.n.01', 'synonyms': ['ladder'], 'id': 621, 'def': 'steps consisting of two parallel members connected by rungs', 'name': 'ladder'}, {'frequency': 'c', 'synset': 'ladle.n.01', 'synonyms': ['ladle'], 'id': 622, 'def': 'a spoon-shaped vessel with a long handle frequently used to transfer liquids', 'name': 'ladle'}, {'frequency': 'c', 'synset': 'ladybug.n.01', 'synonyms': ['ladybug', 'ladybeetle', 'ladybird_beetle'], 'id': 623, 'def': 'small round bright-colored and spotted beetle, typically red and black', 'name': 'ladybug'}, {'frequency': 'f', 'synset': 'lamb.n.01', 'synonyms': ['lamb_(animal)'], 'id': 624, 'def': 'young sheep', 'name': 'lamb_(animal)'}, {'frequency': 'r', 'synset': 'lamb_chop.n.01', 'synonyms': ['lamb-chop', 'lambchop'], 'id': 625, 'def': 'chop cut from a lamb', 'name': 'lamb-chop'}, {'frequency': 'f', 'synset': 'lamp.n.02', 'synonyms': ['lamp'], 'id': 626, 'def': 'a piece of furniture holding one or more electric light bulbs', 'name': 'lamp'}, {'frequency': 'f', 'synset': 'lamppost.n.01', 'synonyms': ['lamppost'], 'id': 627, 'def': 'a metal post supporting 
an outdoor lamp (such as a streetlight)', 'name': 'lamppost'}, {'frequency': 'f', 'synset': 'lampshade.n.01', 'synonyms': ['lampshade'], 'id': 628, 'def': 'a protective ornamental shade used to screen a light bulb from direct view', 'name': 'lampshade'}, {'frequency': 'c', 'synset': 'lantern.n.01', 'synonyms': ['lantern'], 'id': 629, 'def': 'light in a transparent protective case', 'name': 'lantern'}, {'frequency': 'f', 'synset': 'lanyard.n.02', 'synonyms': ['lanyard', 'laniard'], 'id': 630, 'def': 'a cord worn around the neck to hold a knife or whistle, etc.', 'name': 'lanyard'}, {'frequency': 'f', 'synset': 'laptop.n.01', 'synonyms': ['laptop_computer', 'notebook_computer'], 'id': 631, 'def': 'a portable computer small enough to use in your lap', 'name': 'laptop_computer'}, {'frequency': 'r', 'synset': 'lasagna.n.01', 'synonyms': ['lasagna', 'lasagne'], 'id': 632, 'def': 'baked dish of layers of lasagna pasta with sauce and cheese and meat or vegetables', 'name': 'lasagna'}, {'frequency': 'f', 'synset': 'latch.n.02', 'synonyms': ['latch'], 'id': 633, 'def': 'a bar that can be lowered or slid into a groove to fasten a door or gate', 'name': 'latch'}, {'frequency': 'r', 'synset': 'lawn_mower.n.01', 'synonyms': ['lawn_mower'], 'id': 634, 'def': 'garden tool for mowing grass on lawns', 'name': 'lawn_mower'}, {'frequency': 'r', 'synset': 'leather.n.01', 'synonyms': ['leather'], 'id': 635, 'def': 'an animal skin made smooth and flexible by removing the hair and then tanning', 'name': 'leather'}, {'frequency': 'c', 'synset': 'legging.n.01', 'synonyms': ['legging_(clothing)', 'leging_(clothing)', 'leg_covering'], 'id': 636, 'def': 'a garment covering the leg (usually extending from the knee to the ankle)', 'name': 'legging_(clothing)'}, {'frequency': 'c', 'synset': 'lego.n.01', 'synonyms': ['Lego', 'Lego_set'], 'id': 637, 'def': "a child's plastic construction set for making models from blocks", 'name': 'Lego'}, {'frequency': 'r', 'synset': 'legume.n.02', 'synonyms': ['legume'], 'id': 638, 'def': 'the fruit or seed of bean or pea plants', 'name': 'legume'}, {'frequency': 'f', 'synset': 'lemon.n.01', 'synonyms': ['lemon'], 'id': 639, 'def': 'yellow oval fruit with juicy acidic flesh', 'name': 'lemon'}, {'frequency': 'r', 'synset': 'lemonade.n.01', 'synonyms': ['lemonade'], 'id': 640, 'def': 'sweetened beverage of diluted lemon juice', 'name': 'lemonade'}, {'frequency': 'f', 'synset': 'lettuce.n.02', 'synonyms': ['lettuce'], 'id': 641, 'def': 'leafy plant commonly eaten in salad or on sandwiches', 'name': 'lettuce'}, {'frequency': 'f', 'synset': 'license_plate.n.01', 'synonyms': ['license_plate', 'numberplate'], 'id': 642, 'def': "a plate mounted on the front and back of car and bearing the car's registration number", 'name': 'license_plate'}, {'frequency': 'f', 'synset': 'life_buoy.n.01', 'synonyms': ['life_buoy', 'lifesaver', 'life_belt', 'life_ring'], 'id': 643, 'def': 'a ring-shaped life preserver used to prevent drowning (NOT a life-jacket or vest)', 'name': 'life_buoy'}, {'frequency': 'f', 'synset': 'life_jacket.n.01', 'synonyms': ['life_jacket', 'life_vest'], 'id': 644, 'def': 'life preserver consisting of a sleeveless jacket of buoyant or inflatable design', 'name': 'life_jacket'}, {'frequency': 'f', 'synset': 'light_bulb.n.01', 'synonyms': ['lightbulb'], 'id': 645, 'def': 'lightbulb/source of light', 'name': 'lightbulb'}, {'frequency': 'r', 'synset': 'lightning_rod.n.02', 'synonyms': ['lightning_rod', 'lightning_conductor'], 'id': 646, 'def': 'a metallic conductor that is attached to a
high point and leads to the ground', 'name': 'lightning_rod'}, {'frequency': 'f', 'synset': 'lime.n.06', 'synonyms': ['lime'], 'id': 647, 'def': 'the green acidic fruit of any of various lime trees', 'name': 'lime'}, {'frequency': 'r', 'synset': 'limousine.n.01', 'synonyms': ['limousine'], 'id': 648, 'def': 'long luxurious car; usually driven by a chauffeur', 'name': 'limousine'}, {'frequency': 'c', 'synset': 'lion.n.01', 'synonyms': ['lion'], 'id': 649, 'def': 'large gregarious predatory cat of Africa and India', 'name': 'lion'}, {'frequency': 'c', 'synset': 'lip_balm.n.01', 'synonyms': ['lip_balm'], 'id': 650, 'def': 'a balm applied to the lips', 'name': 'lip_balm'}, {'frequency': 'r', 'synset': 'liquor.n.01', 'synonyms': ['liquor', 'spirits', 'hard_liquor', 'liqueur', 'cordial'], 'id': 651, 'def': 'liquor or beer', 'name': 'liquor'}, {'frequency': 'c', 'synset': 'lizard.n.01', 'synonyms': ['lizard'], 'id': 652, 'def': 'a reptile with usually two pairs of legs and a tapering tail', 'name': 'lizard'}, {'frequency': 'f', 'synset': 'log.n.01', 'synonyms': ['log'], 'id': 653, 'def': 'a segment of the trunk of a tree when stripped of branches', 'name': 'log'}, {'frequency': 'c', 'synset': 'lollipop.n.02', 'synonyms': ['lollipop'], 'id': 654, 'def': 'hard candy on a stick', 'name': 'lollipop'}, {'frequency': 'f', 'synset': 'loudspeaker.n.01', 'synonyms': ['speaker_(stero_equipment)'], 'id': 655, 'def': 'electronic device that produces sound often as part of a stereo system', 'name': 'speaker_(stero_equipment)'}, {'frequency': 'c', 'synset': 'love_seat.n.01', 'synonyms': ['loveseat'], 'id': 656, 'def': 'small sofa that seats two people', 'name': 'loveseat'}, {'frequency': 'r', 'synset': 'machine_gun.n.01', 'synonyms': ['machine_gun'], 'id': 657, 'def': 'a rapidly firing automatic gun', 'name': 'machine_gun'}, {'frequency': 'f', 'synset': 'magazine.n.02', 'synonyms': ['magazine'], 'id': 658, 'def': 'a paperback periodic publication', 'name': 'magazine'}, {'frequency': 'f', 'synset': 'magnet.n.01', 'synonyms': ['magnet'], 'id': 659, 'def': 'a device that attracts iron and produces a magnetic field', 'name': 'magnet'}, {'frequency': 'c', 'synset': 'mail_slot.n.01', 'synonyms': ['mail_slot'], 'id': 660, 'def': 'a slot (usually in a door) through which mail can be delivered', 'name': 'mail_slot'}, {'frequency': 'f', 'synset': 'mailbox.n.01', 'synonyms': ['mailbox_(at_home)', 'letter_box_(at_home)'], 'id': 661, 'def': 'a private box for delivery of mail', 'name': 'mailbox_(at_home)'}, {'frequency': 'r', 'synset': 'mallard.n.01', 'synonyms': ['mallard'], 'id': 662, 'def': 'wild dabbling duck from which domestic ducks are descended', 'name': 'mallard'}, {'frequency': 'r', 'synset': 'mallet.n.01', 'synonyms': ['mallet'], 'id': 663, 'def': 'a sports implement with a long handle and a hammer-like head used to hit a ball', 'name': 'mallet'}, {'frequency': 'r', 'synset': 'mammoth.n.01', 'synonyms': ['mammoth'], 'id': 664, 'def': 'any of numerous extinct elephants widely distributed in the Pleistocene', 'name': 'mammoth'}, {'frequency': 'r', 'synset': 'manatee.n.01', 'synonyms': ['manatee'], 'id': 665, 'def': 'sirenian mammal of tropical coastal waters of America', 'name': 'manatee'}, {'frequency': 'c', 'synset': 'mandarin.n.05', 'synonyms': ['mandarin_orange'], 'id': 666, 'def': 'a somewhat flat reddish-orange loose skinned citrus of China', 'name': 'mandarin_orange'}, {'frequency': 'c', 'synset': 'manger.n.01', 'synonyms': ['manger', 'trough'], 'id': 667, 'def': 'a container (usually in a barn or stable) 
from which cattle or horses feed', 'name': 'manger'}, {'frequency': 'f', 'synset': 'manhole.n.01', 'synonyms': ['manhole'], 'id': 668, 'def': 'a hole (usually with a flush cover) through which a person can gain access to an underground structure', 'name': 'manhole'}, {'frequency': 'f', 'synset': 'map.n.01', 'synonyms': ['map'], 'id': 669, 'def': "a diagrammatic representation of the earth's surface (or part of it)", 'name': 'map'}, {'frequency': 'f', 'synset': 'marker.n.03', 'synonyms': ['marker'], 'id': 670, 'def': 'a writing implement for making a mark', 'name': 'marker'}, {'frequency': 'r', 'synset': 'martini.n.01', 'synonyms': ['martini'], 'id': 671, 'def': 'a cocktail made of gin (or vodka) with dry vermouth', 'name': 'martini'}, {'frequency': 'r', 'synset': 'mascot.n.01', 'synonyms': ['mascot'], 'id': 672, 'def': 'a person or animal that is adopted by a team or other group as a symbolic figure', 'name': 'mascot'}, {'frequency': 'c', 'synset': 'mashed_potato.n.01', 'synonyms': ['mashed_potato'], 'id': 673, 'def': 'potato that has been peeled and boiled and then mashed', 'name': 'mashed_potato'}, {'frequency': 'r', 'synset': 'masher.n.02', 'synonyms': ['masher'], 'id': 674, 'def': 'a kitchen utensil used for mashing (e.g. potatoes)', 'name': 'masher'}, {'frequency': 'f', 'synset': 'mask.n.04', 'synonyms': ['mask', 'facemask'], 'id': 675, 'def': 'a protective covering worn over the face', 'name': 'mask'}, {'frequency': 'f', 'synset': 'mast.n.01', 'synonyms': ['mast'], 'id': 676, 'def': 'a vertical spar for supporting sails', 'name': 'mast'}, {'frequency': 'c', 'synset': 'mat.n.03', 'synonyms': ['mat_(gym_equipment)', 'gym_mat'], 'id': 677, 'def': 'sports equipment consisting of a piece of thick padding on the floor for gymnastics', 'name': 'mat_(gym_equipment)'}, {'frequency': 'r', 'synset': 'matchbox.n.01', 'synonyms': ['matchbox'], 'id': 678, 'def': 'a box for holding matches', 'name': 'matchbox'}, {'frequency': 'f', 'synset': 'mattress.n.01', 'synonyms': ['mattress'], 'id': 679, 'def': 'a thick pad filled with resilient material used as a bed or part of a bed', 'name': 'mattress'}, {'frequency': 'c', 'synset': 'measuring_cup.n.01', 'synonyms': ['measuring_cup'], 'id': 680, 'def': 'graduated cup used to measure liquid or granular ingredients', 'name': 'measuring_cup'}, {'frequency': 'c', 'synset': 'measuring_stick.n.01', 'synonyms': ['measuring_stick', 'ruler_(measuring_stick)', 'measuring_rod'], 'id': 681, 'def': 'measuring instrument having a sequence of marks at regular intervals', 'name': 'measuring_stick'}, {'frequency': 'c', 'synset': 'meatball.n.01', 'synonyms': ['meatball'], 'id': 682, 'def': 'ground meat formed into a ball and fried or simmered in broth', 'name': 'meatball'}, {'frequency': 'c', 'synset': 'medicine.n.02', 'synonyms': ['medicine'], 'id': 683, 'def': 'something that treats or prevents or alleviates the symptoms of disease', 'name': 'medicine'}, {'frequency': 'c', 'synset': 'melon.n.01', 'synonyms': ['melon'], 'id': 684, 'def': 'fruit of the gourd family having a hard rind and sweet juicy flesh', 'name': 'melon'}, {'frequency': 'f', 'synset': 'microphone.n.01', 'synonyms': ['microphone'], 'id': 685, 'def': 'device for converting sound waves into electrical energy', 'name': 'microphone'}, {'frequency': 'r', 'synset': 'microscope.n.01', 'synonyms': ['microscope'], 'id': 686, 'def': 'magnifier of the image of small objects', 'name': 'microscope'}, {'frequency': 'f', 'synset': 'microwave.n.02', 'synonyms': ['microwave_oven'], 'id': 687, 'def': 'kitchen appliance that 
cooks food by passing an electromagnetic wave through it', 'name': 'microwave_oven'}, {'frequency': 'r', 'synset': 'milestone.n.01', 'synonyms': ['milestone', 'milepost'], 'id': 688, 'def': 'stone post at side of a road to show distances', 'name': 'milestone'}, {'frequency': 'f', 'synset': 'milk.n.01', 'synonyms': ['milk'], 'id': 689, 'def': 'a white nutritious liquid secreted by mammals and used as food by human beings', 'name': 'milk'}, {'frequency': 'r', 'synset': 'milk_can.n.01', 'synonyms': ['milk_can'], 'id': 690, 'def': 'can for transporting milk', 'name': 'milk_can'}, {'frequency': 'r', 'synset': 'milkshake.n.01', 'synonyms': ['milkshake'], 'id': 691, 'def': 'frothy drink of milk and flavoring and sometimes fruit or ice cream', 'name': 'milkshake'}, {'frequency': 'f', 'synset': 'minivan.n.01', 'synonyms': ['minivan'], 'id': 692, 'def': 'a small box-shaped passenger van', 'name': 'minivan'}, {'frequency': 'r', 'synset': 'mint.n.05', 'synonyms': ['mint_candy'], 'id': 693, 'def': 'a candy that is flavored with a mint oil', 'name': 'mint_candy'}, {'frequency': 'f', 'synset': 'mirror.n.01', 'synonyms': ['mirror'], 'id': 694, 'def': 'polished surface that forms images by reflecting light', 'name': 'mirror'}, {'frequency': 'c', 'synset': 'mitten.n.01', 'synonyms': ['mitten'], 'id': 695, 'def': 'glove that encases the thumb separately and the other four fingers together', 'name': 'mitten'}, {'frequency': 'c', 'synset': 'mixer.n.04', 'synonyms': ['mixer_(kitchen_tool)', 'stand_mixer'], 'id': 696, 'def': 'a kitchen utensil that is used for mixing foods', 'name': 'mixer_(kitchen_tool)'}, {'frequency': 'c', 'synset': 'money.n.03', 'synonyms': ['money'], 'id': 697, 'def': 'the official currency issued by a government or national bank', 'name': 'money'}, {'frequency': 'f', 'synset': 'monitor.n.04', 'synonyms': ['monitor_(computer_equipment) computer_monitor'], 'id': 698, 'def': 'a computer monitor', 'name': 'monitor_(computer_equipment) computer_monitor'}, {'frequency': 'c', 'synset': 'monkey.n.01', 'synonyms': ['monkey'], 'id': 699, 'def': 'any of various long-tailed primates', 'name': 'monkey'}, {'frequency': 'f', 'synset': 'motor.n.01', 'synonyms': ['motor'], 'id': 700, 'def': 'machine that converts other forms of energy into mechanical energy and so imparts motion', 'name': 'motor'}, {'frequency': 'f', 'synset': 'motor_scooter.n.01', 'synonyms': ['motor_scooter', 'scooter'], 'id': 701, 'def': 'a wheeled vehicle with small wheels and a low-powered engine', 'name': 'motor_scooter'}, {'frequency': 'r', 'synset': 'motor_vehicle.n.01', 'synonyms': ['motor_vehicle', 'automotive_vehicle'], 'id': 702, 'def': 'a self-propelled wheeled vehicle that does not run on rails', 'name': 'motor_vehicle'}, {'frequency': 'f', 'synset': 'motorcycle.n.01', 'synonyms': ['motorcycle'], 'id': 703, 'def': 'a motor vehicle with two wheels and a strong frame', 'name': 'motorcycle'}, {'frequency': 'f', 'synset': 'mound.n.01', 'synonyms': ['mound_(baseball)', "pitcher's_mound"], 'id': 704, 'def': '(baseball) the slight elevation on which the pitcher stands', 'name': 'mound_(baseball)'}, {'frequency': 'f', 'synset': 'mouse.n.04', 'synonyms': ['mouse_(computer_equipment)', 'computer_mouse'], 'id': 705, 'def': 'a computer input device that controls an on-screen pointer (does not include trackpads / touchpads)', 'name': 'mouse_(computer_equipment)'}, {'frequency': 'f', 'synset': 'mousepad.n.01', 'synonyms': ['mousepad'], 'id': 706, 'def': 'a small portable pad that provides an operating surface for a computer mouse', 'name': 
'mousepad'}, {'frequency': 'c', 'synset': 'muffin.n.01', 'synonyms': ['muffin'], 'id': 707, 'def': 'a sweet quick bread baked in a cup-shaped pan', 'name': 'muffin'}, {'frequency': 'f', 'synset': 'mug.n.04', 'synonyms': ['mug'], 'id': 708, 'def': 'a drinking vessel with a handle, usually cylindrical', 'name': 'mug'}, {'frequency': 'f', 'synset': 'mushroom.n.02', 'synonyms': ['mushroom'], 'id': 709, 'def': 'a common mushroom', 'name': 'mushroom'}, {'frequency': 'r', 'synset': 'music_stool.n.01', 'synonyms': ['music_stool', 'piano_stool'], 'id': 710, 'def': 'a stool for piano players; usually adjustable in height', 'name': 'music_stool'}, {'frequency': 'c', 'synset': 'musical_instrument.n.01', 'synonyms': ['musical_instrument', 'instrument_(musical)'], 'id': 711, 'def': 'any of various devices or contrivances that can be used to produce musical tones or sounds', 'name': 'musical_instrument'}, {'frequency': 'r', 'synset': 'nailfile.n.01', 'synonyms': ['nailfile'], 'id': 712, 'def': 'a small flat file for shaping the nails', 'name': 'nailfile'}, {'frequency': 'f', 'synset': 'napkin.n.01', 'synonyms': ['napkin', 'table_napkin', 'serviette'], 'id': 713, 'def': 'a small piece of table linen or paper that is used to wipe the mouth and to cover the lap in order to protect clothing', 'name': 'napkin'}, {'frequency': 'r', 'synset': 'neckerchief.n.01', 'synonyms': ['neckerchief'], 'id': 714, 'def': 'a kerchief worn around the neck', 'name': 'neckerchief'}, {'frequency': 'f', 'synset': 'necklace.n.01', 'synonyms': ['necklace'], 'id': 715, 'def': 'jewelry consisting of a cord or chain (often bearing gems) worn about the neck as an ornament', 'name': 'necklace'}, {'frequency': 'f', 'synset': 'necktie.n.01', 'synonyms': ['necktie', 'tie_(necktie)'], 'id': 716, 'def': 'neckwear consisting of a long narrow piece of material worn under a collar and tied in a knot at the front', 'name': 'necktie'}, {'frequency': 'c', 'synset': 'needle.n.03', 'synonyms': ['needle'], 'id': 717, 'def': 'a sharp pointed implement (usually metal)', 'name': 'needle'}, {'frequency': 'c', 'synset': 'nest.n.01', 'synonyms': ['nest'], 'id': 718, 'def': 'a structure in which animals lay eggs or give birth to their young', 'name': 'nest'}, {'frequency': 'f', 'synset': 'newspaper.n.01', 'synonyms': ['newspaper', 'paper_(newspaper)'], 'id': 719, 'def': 'a daily or weekly publication on folded sheets containing news, articles, and advertisements', 'name': 'newspaper'}, {'frequency': 'c', 'synset': 'newsstand.n.01', 'synonyms': ['newsstand'], 'id': 720, 'def': 'a stall where newspapers and other periodicals are sold', 'name': 'newsstand'}, {'frequency': 'c', 'synset': 'nightwear.n.01', 'synonyms': ['nightshirt', 'nightwear', 'sleepwear', 'nightclothes'], 'id': 721, 'def': 'garments designed to be worn in bed', 'name': 'nightshirt'}, {'frequency': 'r', 'synset': 'nosebag.n.01', 'synonyms': ['nosebag_(for_animals)', 'feedbag'], 'id': 722, 'def': 'a canvas bag that is used to feed an animal (such as a horse); covers the muzzle and fastens at the top of the head', 'name': 'nosebag_(for_animals)'}, {'frequency': 'c', 'synset': 'noseband.n.01', 'synonyms': ['noseband_(for_animals)', 'nosepiece_(for_animals)'], 'id': 723, 'def': "a strap that is the part of a bridle that goes over the animal's nose", 'name': 'noseband_(for_animals)'}, {'frequency': 'f', 'synset': 'notebook.n.01', 'synonyms': ['notebook'], 'id': 724, 'def': 'a book with blank pages for recording notes or memoranda', 'name': 'notebook'}, {'frequency': 'c', 'synset': 'notepad.n.01', 'synonyms': 
['notepad'], 'id': 725, 'def': 'a pad of paper for keeping notes', 'name': 'notepad'}, {'frequency': 'f', 'synset': 'nut.n.03', 'synonyms': ['nut'], 'id': 726, 'def': 'a small metal block (usually square or hexagonal) with internal screw thread to be fitted onto a bolt', 'name': 'nut'}, {'frequency': 'r', 'synset': 'nutcracker.n.01', 'synonyms': ['nutcracker'], 'id': 727, 'def': 'a hand tool used to crack nuts open', 'name': 'nutcracker'}, {'frequency': 'f', 'synset': 'oar.n.01', 'synonyms': ['oar'], 'id': 728, 'def': 'an implement used to propel or steer a boat', 'name': 'oar'}, {'frequency': 'r', 'synset': 'octopus.n.01', 'synonyms': ['octopus_(food)'], 'id': 729, 'def': 'tentacles of octopus prepared as food', 'name': 'octopus_(food)'}, {'frequency': 'r', 'synset': 'octopus.n.02', 'synonyms': ['octopus_(animal)'], 'id': 730, 'def': 'bottom-living cephalopod having a soft oval body with eight long tentacles', 'name': 'octopus_(animal)'}, {'frequency': 'c', 'synset': 'oil_lamp.n.01', 'synonyms': ['oil_lamp', 'kerosene_lamp', 'kerosine_lamp'], 'id': 731, 'def': 'a lamp that burns oil (as kerosine) for light', 'name': 'oil_lamp'}, {'frequency': 'c', 'synset': 'olive_oil.n.01', 'synonyms': ['olive_oil'], 'id': 732, 'def': 'oil from olives', 'name': 'olive_oil'}, {'frequency': 'r', 'synset': 'omelet.n.01', 'synonyms': ['omelet', 'omelette'], 'id': 733, 'def': 'beaten eggs cooked until just set; may be folded around e.g. ham or cheese or jelly', 'name': 'omelet'}, {'frequency': 'f', 'synset': 'onion.n.01', 'synonyms': ['onion'], 'id': 734, 'def': 'the bulb of an onion plant', 'name': 'onion'}, {'frequency': 'f', 'synset': 'orange.n.01', 'synonyms': ['orange_(fruit)'], 'id': 735, 'def': 'orange (FRUIT of an orange tree)', 'name': 'orange_(fruit)'}, {'frequency': 'c', 'synset': 'orange_juice.n.01', 'synonyms': ['orange_juice'], 'id': 736, 'def': 'bottled or freshly squeezed juice of oranges', 'name': 'orange_juice'}, {'frequency': 'c', 'synset': 'ostrich.n.02', 'synonyms': ['ostrich'], 'id': 737, 'def': 'fast-running African flightless bird with two-toed feet; largest living bird', 'name': 'ostrich'}, {'frequency': 'f', 'synset': 'ottoman.n.03', 'synonyms': ['ottoman', 'pouf', 'pouffe', 'hassock'], 'id': 738, 'def': 'a thick standalone cushion used as a seat or footrest, often next to a chair', 'name': 'ottoman'}, {'frequency': 'f', 'synset': 'oven.n.01', 'synonyms': ['oven'], 'id': 739, 'def': 'kitchen appliance used for baking or roasting', 'name': 'oven'}, {'frequency': 'c', 'synset': 'overall.n.01', 'synonyms': ['overalls_(clothing)'], 'id': 740, 'def': 'work clothing consisting of denim trousers usually with a bib and shoulder straps', 'name': 'overalls_(clothing)'}, {'frequency': 'c', 'synset': 'owl.n.01', 'synonyms': ['owl'], 'id': 741, 'def': 'nocturnal bird of prey with hawk-like beak and claws and large head with front-facing eyes', 'name': 'owl'}, {'frequency': 'c', 'synset': 'packet.n.03', 'synonyms': ['packet'], 'id': 742, 'def': 'a small package or bundle', 'name': 'packet'}, {'frequency': 'r', 'synset': 'pad.n.03', 'synonyms': ['inkpad', 'inking_pad', 'stamp_pad'], 'id': 743, 'def': 'absorbent material saturated with ink used to transfer ink evenly to a rubber stamp', 'name': 'inkpad'}, {'frequency': 'c', 'synset': 'pad.n.04', 'synonyms': ['pad'], 'id': 744, 'def': 'protective padding; mostly arm/knee pads are labeled with this category', 'name': 'pad'}, {'frequency': 'f', 'synset': 'paddle.n.04', 'synonyms': ['paddle', 'boat_paddle'], 'id': 745, 'def': 'a short light oar used without an oarlock to propel a canoe or small 
boat', 'name': 'paddle'}, {'frequency': 'c', 'synset': 'padlock.n.01', 'synonyms': ['padlock'], 'id': 746, 'def': 'a detachable, portable lock', 'name': 'padlock'}, {'frequency': 'c', 'synset': 'paintbrush.n.01', 'synonyms': ['paintbrush'], 'id': 747, 'def': 'a brush used as an applicator to apply paint', 'name': 'paintbrush'}, {'frequency': 'f', 'synset': 'painting.n.01', 'synonyms': ['painting'], 'id': 748, 'def': 'graphic art consisting of an artistic composition made by applying paints to a surface', 'name': 'painting'}, {'frequency': 'f', 'synset': 'pajama.n.02', 'synonyms': ['pajamas', 'pyjamas'], 'id': 749, 'def': 'loose-fitting nightclothes worn for sleeping or lounging', 'name': 'pajamas'}, {'frequency': 'c', 'synset': 'palette.n.02', 'synonyms': ['palette', 'pallet'], 'id': 750, 'def': 'board that provides a flat surface on which artists mix paints and the range of colors used', 'name': 'palette'}, {'frequency': 'f', 'synset': 'pan.n.01', 'synonyms': ['pan_(for_cooking)', 'cooking_pan'], 'id': 751, 'def': 'cooking utensil consisting of a wide metal vessel', 'name': 'pan_(for_cooking)'}, {'frequency': 'r', 'synset': 'pan.n.03', 'synonyms': ['pan_(metal_container)'], 'id': 752, 'def': 'shallow container made of metal', 'name': 'pan_(metal_container)'}, {'frequency': 'c', 'synset': 'pancake.n.01', 'synonyms': ['pancake'], 'id': 753, 'def': 'a flat cake of thin batter fried on both sides on a griddle', 'name': 'pancake'}, {'frequency': 'r', 'synset': 'pantyhose.n.01', 'synonyms': ['pantyhose'], 'id': 754, 'def': "a woman's tights consisting of underpants and stockings", 'name': 'pantyhose'}, {'frequency': 'r', 'synset': 'papaya.n.02', 'synonyms': ['papaya'], 'id': 755, 'def': 'large oval melon-like tropical fruit with yellowish flesh', 'name': 'papaya'}, {'frequency': 'f', 'synset': 'paper_plate.n.01', 'synonyms': ['paper_plate'], 'id': 756, 'def': 'a disposable plate made of cardboard', 'name': 'paper_plate'}, {'frequency': 'f', 'synset': 'paper_towel.n.01', 'synonyms': ['paper_towel'], 'id': 757, 'def': 'a disposable towel made of absorbent paper', 'name': 'paper_towel'}, {'frequency': 'r', 'synset': 'paperback_book.n.01', 'synonyms': ['paperback_book', 'paper-back_book', 'softback_book', 'soft-cover_book'], 'id': 758, 'def': 'a book with paper covers', 'name': 'paperback_book'}, {'frequency': 'r', 'synset': 'paperweight.n.01', 'synonyms': ['paperweight'], 'id': 759, 'def': 'a weight used to hold down a stack of papers', 'name': 'paperweight'}, {'frequency': 'c', 'synset': 'parachute.n.01', 'synonyms': ['parachute'], 'id': 760, 'def': 'rescue equipment consisting of a device that fills with air and retards your fall', 'name': 'parachute'}, {'frequency': 'c', 'synset': 'parakeet.n.01', 'synonyms': ['parakeet', 'parrakeet', 'parroket', 'paraquet', 'paroquet', 'parroquet'], 'id': 761, 'def': 'any of numerous small slender long-tailed parrots', 'name': 'parakeet'}, {'frequency': 'c', 'synset': 'parasail.n.01', 'synonyms': ['parasail_(sports)'], 'id': 762, 'def': 'parachute that will lift a person up into the air when it is towed by a motorboat or a car', 'name': 'parasail_(sports)'}, {'frequency': 'c', 'synset': 'parasol.n.01', 'synonyms': ['parasol', 'sunshade'], 'id': 763, 'def': 'a handheld collapsible source of shade', 'name': 'parasol'}, {'frequency': 'r', 'synset': 'parchment.n.01', 'synonyms': ['parchment'], 'id': 764, 'def': 'a superior paper resembling sheepskin', 'name': 'parchment'}, {'frequency': 'c', 'synset': 'parka.n.01', 'synonyms': ['parka', 'anorak'], 'id': 765, 
'def': "a kind of heavy jacket (`windcheater' is a British term)", 'name': 'parka'}, {'frequency': 'f', 'synset': 'parking_meter.n.01', 'synonyms': ['parking_meter'], 'id': 766, 'def': 'a coin-operated timer located next to a parking space', 'name': 'parking_meter'}, {'frequency': 'c', 'synset': 'parrot.n.01', 'synonyms': ['parrot'], 'id': 767, 'def': 'usually brightly colored tropical birds with short hooked beaks and the ability to mimic sounds', 'name': 'parrot'}, {'frequency': 'c', 'synset': 'passenger_car.n.01', 'synonyms': ['passenger_car_(part_of_a_train)', 'coach_(part_of_a_train)'], 'id': 768, 'def': 'a railcar where passengers ride', 'name': 'passenger_car_(part_of_a_train)'}, {'frequency': 'r', 'synset': 'passenger_ship.n.01', 'synonyms': ['passenger_ship'], 'id': 769, 'def': 'a ship built to carry passengers', 'name': 'passenger_ship'}, {'frequency': 'c', 'synset': 'passport.n.02', 'synonyms': ['passport'], 'id': 770, 'def': 'a document issued by a country to a citizen allowing that person to travel abroad and re-enter the home country', 'name': 'passport'}, {'frequency': 'f', 'synset': 'pastry.n.02', 'synonyms': ['pastry'], 'id': 771, 'def': 'any of various baked foods made of dough or batter', 'name': 'pastry'}, {'frequency': 'r', 'synset': 'patty.n.01', 'synonyms': ['patty_(food)'], 'id': 772, 'def': 'small flat mass of chopped food', 'name': 'patty_(food)'}, {'frequency': 'c', 'synset': 'pea.n.01', 'synonyms': ['pea_(food)'], 'id': 773, 'def': 'seed of a pea plant used for food', 'name': 'pea_(food)'}, {'frequency': 'c', 'synset': 'peach.n.03', 'synonyms': ['peach'], 'id': 774, 'def': 'downy juicy fruit with sweet yellowish or whitish flesh', 'name': 'peach'}, {'frequency': 'c', 'synset': 'peanut_butter.n.01', 'synonyms': ['peanut_butter'], 'id': 775, 'def': 'a spread made from ground peanuts', 'name': 'peanut_butter'}, {'frequency': 'f', 'synset': 'pear.n.01', 'synonyms': ['pear'], 'id': 776, 'def': 'sweet juicy gritty-textured fruit available in many varieties', 'name': 'pear'}, {'frequency': 'c', 'synset': 'peeler.n.03', 'synonyms': ['peeler_(tool_for_fruit_and_vegetables)'], 'id': 777, 'def': 'a device for peeling vegetables or fruits', 'name': 'peeler_(tool_for_fruit_and_vegetables)'}, {'frequency': 'r', 'synset': 'peg.n.04', 'synonyms': ['wooden_leg', 'pegleg'], 'id': 778, 'def': 'a prosthesis that replaces a missing leg', 'name': 'wooden_leg'}, {'frequency': 'r', 'synset': 'pegboard.n.01', 'synonyms': ['pegboard'], 'id': 779, 'def': 'a board perforated with regularly spaced holes into which pegs can be fitted', 'name': 'pegboard'}, {'frequency': 'c', 'synset': 'pelican.n.01', 'synonyms': ['pelican'], 'id': 780, 'def': 'large long-winged warm-water seabird having a large bill with a distensible pouch for fish', 'name': 'pelican'}, {'frequency': 'f', 'synset': 'pen.n.01', 'synonyms': ['pen'], 'id': 781, 'def': 'a writing implement with a point from which ink flows', 'name': 'pen'}, {'frequency': 'f', 'synset': 'pencil.n.01', 'synonyms': ['pencil'], 'id': 782, 'def': 'a thin cylindrical pointed writing implement made of wood and graphite', 'name': 'pencil'}, {'frequency': 'r', 'synset': 'pencil_box.n.01', 'synonyms': ['pencil_box', 'pencil_case'], 'id': 783, 'def': 'a box for holding pencils', 'name': 'pencil_box'}, {'frequency': 'r', 'synset': 'pencil_sharpener.n.01', 'synonyms': ['pencil_sharpener'], 'id': 784, 'def': 'a rotary implement for sharpening the point on pencils', 'name': 'pencil_sharpener'}, {'frequency': 'r', 'synset': 'pendulum.n.01', 'synonyms': 
['pendulum'], 'id': 785, 'def': 'an apparatus consisting of an object mounted so that it swings freely under the influence of gravity', 'name': 'pendulum'}, {'frequency': 'c', 'synset': 'penguin.n.01', 'synonyms': ['penguin'], 'id': 786, 'def': 'short-legged flightless birds of cold southern regions having webbed feet and wings modified as flippers', 'name': 'penguin'}, {'frequency': 'r', 'synset': 'pennant.n.02', 'synonyms': ['pennant'], 'id': 787, 'def': 'a flag longer than it is wide (and often tapering)', 'name': 'pennant'}, {'frequency': 'r', 'synset': 'penny.n.02', 'synonyms': ['penny_(coin)'], 'id': 788, 'def': 'a coin worth one-hundredth of the value of the basic unit', 'name': 'penny_(coin)'}, {'frequency': 'f', 'synset': 'pepper.n.03', 'synonyms': ['pepper', 'peppercorn'], 'id': 789, 'def': 'pungent seasoning from the berry of the common pepper plant; whole or ground', 'name': 'pepper'}, {'frequency': 'c', 'synset': 'pepper_mill.n.01', 'synonyms': ['pepper_mill', 'pepper_grinder'], 'id': 790, 'def': 'a mill for grinding pepper', 'name': 'pepper_mill'}, {'frequency': 'c', 'synset': 'perfume.n.02', 'synonyms': ['perfume'], 'id': 791, 'def': 'a toiletry that emits and diffuses a fragrant odor', 'name': 'perfume'}, {'frequency': 'r', 'synset': 'persimmon.n.02', 'synonyms': ['persimmon'], 'id': 792, 'def': 'orange fruit resembling a plum; edible when fully ripe', 'name': 'persimmon'}, {'frequency': 'f', 'synset': 'person.n.01', 'synonyms': ['person', 'baby', 'child', 'boy', 'girl', 'man', 'woman', 'human'], 'id': 793, 'def': 'a human being', 'name': 'person'}, {'frequency': 'c', 'synset': 'pet.n.01', 'synonyms': ['pet'], 'id': 794, 'def': 'a domesticated animal kept for companionship or amusement', 'name': 'pet'}, {'frequency': 'c', 'synset': 'pew.n.01', 'synonyms': ['pew_(church_bench)', 'church_bench'], 'id': 795, 'def': 'long bench with backs; used in church by the congregation', 'name': 'pew_(church_bench)'}, {'frequency': 'r', 'synset': 'phonebook.n.01', 'synonyms': ['phonebook', 'telephone_book', 'telephone_directory'], 'id': 796, 'def': 'a directory containing an alphabetical list of telephone subscribers and their telephone numbers', 'name': 'phonebook'}, {'frequency': 'c', 'synset': 'phonograph_record.n.01', 'synonyms': ['phonograph_record', 'phonograph_recording', 'record_(phonograph_recording)'], 'id': 797, 'def': 'sound recording consisting of a typically black disk with a continuous groove', 'name': 'phonograph_record'}, {'frequency': 'f', 'synset': 'piano.n.01', 'synonyms': ['piano'], 'id': 798, 'def': 'a keyboard instrument that is played by depressing keys that cause hammers to strike tuned strings and produce sounds', 'name': 'piano'}, {'frequency': 'f', 'synset': 'pickle.n.01', 'synonyms': ['pickle'], 'id': 799, 'def': 'vegetables (especially cucumbers) preserved in brine or vinegar', 'name': 'pickle'}, {'frequency': 'f', 'synset': 'pickup.n.01', 'synonyms': ['pickup_truck'], 'id': 800, 'def': 'a light truck with an open body and low sides and a tailboard', 'name': 'pickup_truck'}, {'frequency': 'c', 'synset': 'pie.n.01', 'synonyms': ['pie'], 'id': 801, 'def': 'dish baked in pastry-lined pan often with a pastry top', 'name': 'pie'}, {'frequency': 'c', 'synset': 'pigeon.n.01', 'synonyms': ['pigeon'], 'id': 802, 'def': 'wild and domesticated birds having a heavy body and short legs', 'name': 'pigeon'}, {'frequency': 'r', 'synset': 'piggy_bank.n.01', 'synonyms': ['piggy_bank', 'penny_bank'], 'id': 803, 'def': "a child's coin bank (often shaped like a pig)", 'name': 
'piggy_bank'}, {'frequency': 'f', 'synset': 'pillow.n.01', 'synonyms': ['pillow'], 'id': 804, 'def': 'a cushion to support the head of a sleeping person', 'name': 'pillow'}, {'frequency': 'r', 'synset': 'pin.n.09', 'synonyms': ['pin_(non_jewelry)'], 'id': 805, 'def': 'a small slender (often pointed) piece of wood or metal used to support or fasten or attach things', 'name': 'pin_(non_jewelry)'}, {'frequency': 'f', 'synset': 'pineapple.n.02', 'synonyms': ['pineapple'], 'id': 806, 'def': 'large sweet fleshy tropical fruit with a tuft of stiff leaves', 'name': 'pineapple'}, {'frequency': 'c', 'synset': 'pinecone.n.01', 'synonyms': ['pinecone'], 'id': 807, 'def': 'the seed-producing cone of a pine tree', 'name': 'pinecone'}, {'frequency': 'r', 'synset': 'ping-pong_ball.n.01', 'synonyms': ['ping-pong_ball'], 'id': 808, 'def': 'light hollow ball used in playing table tennis', 'name': 'ping-pong_ball'}, {'frequency': 'r', 'synset': 'pinwheel.n.03', 'synonyms': ['pinwheel'], 'id': 809, 'def': 'a toy consisting of vanes of colored paper or plastic that is pinned to a stick and spins when it is pointed into the wind', 'name': 'pinwheel'}, {'frequency': 'r', 'synset': 'pipe.n.01', 'synonyms': ['tobacco_pipe'], 'id': 810, 'def': 'a tube with a small bowl at one end; used for smoking tobacco', 'name': 'tobacco_pipe'}, {'frequency': 'f', 'synset': 'pipe.n.02', 'synonyms': ['pipe', 'piping'], 'id': 811, 'def': 'a long tube made of metal or plastic that is used to carry water or oil or gas etc.', 'name': 'pipe'}, {'frequency': 'r', 'synset': 'pistol.n.01', 'synonyms': ['pistol', 'handgun'], 'id': 812, 'def': 'a firearm that is held and fired with one hand', 'name': 'pistol'}, {'frequency': 'c', 'synset': 'pita.n.01', 'synonyms': ['pita_(bread)', 'pocket_bread'], 'id': 813, 'def': 'usually small round bread that can open into a pocket for filling', 'name': 'pita_(bread)'}, {'frequency': 'f', 'synset': 'pitcher.n.02', 'synonyms': ['pitcher_(vessel_for_liquid)', 'ewer'], 'id': 814, 'def': 'an open vessel with a handle and a spout for pouring', 'name': 'pitcher_(vessel_for_liquid)'}, {'frequency': 'r', 'synset': 'pitchfork.n.01', 'synonyms': ['pitchfork'], 'id': 815, 'def': 'a long-handled hand tool with sharp widely spaced prongs for lifting and pitching hay', 'name': 'pitchfork'}, {'frequency': 'f', 'synset': 'pizza.n.01', 'synonyms': ['pizza'], 'id': 816, 'def': 'Italian open pie made of thin bread dough spread with a spiced mixture of e.g. 
tomato sauce and cheese', 'name': 'pizza'}, {'frequency': 'f', 'synset': 'place_mat.n.01', 'synonyms': ['place_mat'], 'id': 817, 'def': 'a mat placed on a table for an individual place setting', 'name': 'place_mat'}, {'frequency': 'f', 'synset': 'plate.n.04', 'synonyms': ['plate'], 'id': 818, 'def': 'dish on which food is served or from which food is eaten', 'name': 'plate'}, {'frequency': 'c', 'synset': 'platter.n.01', 'synonyms': ['platter'], 'id': 819, 'def': 'a large shallow dish used for serving food', 'name': 'platter'}, {'frequency': 'r', 'synset': 'playpen.n.01', 'synonyms': ['playpen'], 'id': 820, 'def': 'a portable enclosure in which babies may be left to play', 'name': 'playpen'}, {'frequency': 'c', 'synset': 'pliers.n.01', 'synonyms': ['pliers', 'plyers'], 'id': 821, 'def': 'a gripping hand tool with two hinged arms and (usually) serrated jaws', 'name': 'pliers'}, {'frequency': 'r', 'synset': 'plow.n.01', 'synonyms': ['plow_(farm_equipment)', 'plough_(farm_equipment)'], 'id': 822, 'def': 'a farm tool having one or more heavy blades to break the soil and cut a furrow prior to sowing', 'name': 'plow_(farm_equipment)'}, {'frequency': 'r', 'synset': 'plume.n.02', 'synonyms': ['plume'], 'id': 823, 'def': 'a feather or cluster of feathers worn as an ornament', 'name': 'plume'}, {'frequency': 'r', 'synset': 'pocket_watch.n.01', 'synonyms': ['pocket_watch'], 'id': 824, 'def': 'a watch that is carried in a small watch pocket', 'name': 'pocket_watch'}, {'frequency': 'c', 'synset': 'pocketknife.n.01', 'synonyms': ['pocketknife'], 'id': 825, 'def': 'a knife with a blade that folds into the handle; suitable for carrying in the pocket', 'name': 'pocketknife'}, {'frequency': 'c', 'synset': 'poker.n.01', 'synonyms': ['poker_(fire_stirring_tool)', 'stove_poker', 'fire_hook'], 'id': 826, 'def': 'fire iron consisting of a metal rod with a handle; used to stir a fire', 'name': 'poker_(fire_stirring_tool)'}, {'frequency': 'f', 'synset': 'pole.n.01', 'synonyms': ['pole', 'post'], 'id': 827, 'def': 'a long (usually round) rod of wood or metal or plastic', 'name': 'pole'}, {'frequency': 'f', 'synset': 'polo_shirt.n.01', 'synonyms': ['polo_shirt', 'sport_shirt'], 'id': 828, 'def': 'a shirt with short sleeves designed for comfort and casual wear', 'name': 'polo_shirt'}, {'frequency': 'r', 'synset': 'poncho.n.01', 'synonyms': ['poncho'], 'id': 829, 'def': 'a blanket-like cloak with a hole in the center for the head', 'name': 'poncho'}, {'frequency': 'c', 'synset': 'pony.n.05', 'synonyms': ['pony'], 'id': 830, 'def': 'any of various breeds of small gentle horses usually less than five feet high at the shoulder', 'name': 'pony'}, {'frequency': 'r', 'synset': 'pool_table.n.01', 'synonyms': ['pool_table', 'billiard_table', 'snooker_table'], 'id': 831, 'def': 'game equipment consisting of a heavy table on which pool is played', 'name': 'pool_table'}, {'frequency': 'f', 'synset': 'pop.n.02', 'synonyms': ['pop_(soda)', 'soda_(pop)', 'tonic', 'soft_drink'], 'id': 832, 'def': 'a sweet drink containing carbonated water and flavoring', 'name': 'pop_(soda)'}, {'frequency': 'c', 'synset': 'postbox.n.01', 'synonyms': ['postbox_(public)', 'mailbox_(public)'], 'id': 833, 'def': 'public box for deposit of mail', 'name': 'postbox_(public)'}, {'frequency': 'c', 'synset': 'postcard.n.01', 'synonyms': ['postcard', 'postal_card', 'mailing-card'], 'id': 834, 'def': 'a card for sending messages by post without an envelope', 'name': 'postcard'}, {'frequency': 'f', 'synset': 'poster.n.01', 'synonyms': ['poster', 'placard'], 'id': 
835, 'def': 'a sign posted in a public place as an advertisement', 'name': 'poster'}, {'frequency': 'f', 'synset': 'pot.n.01', 'synonyms': ['pot'], 'id': 836, 'def': 'metal or earthenware cooking vessel that is usually round and deep; often has a handle and lid', 'name': 'pot'}, {'frequency': 'f', 'synset': 'pot.n.04', 'synonyms': ['flowerpot'], 'id': 837, 'def': 'a container in which plants are cultivated', 'name': 'flowerpot'}, {'frequency': 'f', 'synset': 'potato.n.01', 'synonyms': ['potato'], 'id': 838, 'def': 'an edible tuber native to South America', 'name': 'potato'}, {'frequency': 'c', 'synset': 'potholder.n.01', 'synonyms': ['potholder'], 'id': 839, 'def': 'an insulated pad for holding hot pots', 'name': 'potholder'}, {'frequency': 'c', 'synset': 'pottery.n.01', 'synonyms': ['pottery', 'clayware'], 'id': 840, 'def': 'ceramic ware made from clay and baked in a kiln', 'name': 'pottery'}, {'frequency': 'c', 'synset': 'pouch.n.01', 'synonyms': ['pouch'], 'id': 841, 'def': 'a small or medium size container for holding or carrying things', 'name': 'pouch'}, {'frequency': 'c', 'synset': 'power_shovel.n.01', 'synonyms': ['power_shovel', 'excavator', 'digger'], 'id': 842, 'def': 'a machine for excavating', 'name': 'power_shovel'}, {'frequency': 'c', 'synset': 'prawn.n.01', 'synonyms': ['prawn', 'shrimp'], 'id': 843, 'def': 'any of various edible decapod crustaceans', 'name': 'prawn'}, {'frequency': 'c', 'synset': 'pretzel.n.01', 'synonyms': ['pretzel'], 'id': 844, 'def': 'glazed and salted cracker typically in the shape of a loose knot', 'name': 'pretzel'}, {'frequency': 'f', 'synset': 'printer.n.03', 'synonyms': ['printer', 'printing_machine'], 'id': 845, 'def': 'a machine that prints', 'name': 'printer'}, {'frequency': 'c', 'synset': 'projectile.n.01', 'synonyms': ['projectile_(weapon)', 'missile'], 'id': 846, 'def': 'a weapon that is forcibly thrown or projected at a target', 'name': 'projectile_(weapon)'}, {'frequency': 'c', 'synset': 'projector.n.02', 'synonyms': ['projector'], 'id': 847, 'def': 'an optical instrument that projects an enlarged image onto a screen', 'name': 'projector'}, {'frequency': 'f', 'synset': 'propeller.n.01', 'synonyms': ['propeller', 'propellor'], 'id': 848, 'def': 'a mechanical device that rotates to push against air or water', 'name': 'propeller'}, {'frequency': 'r', 'synset': 'prune.n.01', 'synonyms': ['prune'], 'id': 849, 'def': 'dried plum', 'name': 'prune'}, {'frequency': 'r', 'synset': 'pudding.n.01', 'synonyms': ['pudding'], 'id': 850, 'def': 'any of various soft thick unsweetened baked dishes', 'name': 'pudding'}, {'frequency': 'r', 'synset': 'puffer.n.02', 'synonyms': ['puffer_(fish)', 'pufferfish', 'blowfish', 'globefish'], 'id': 851, 'def': 'fishes whose elongated spiny body can inflate itself with water or air to form a globe', 'name': 'puffer_(fish)'}, {'frequency': 'r', 'synset': 'puffin.n.01', 'synonyms': ['puffin'], 'id': 852, 'def': 'seabirds having short necks and brightly colored compressed bills', 'name': 'puffin'}, {'frequency': 'r', 'synset': 'pug.n.01', 'synonyms': ['pug-dog'], 'id': 853, 'def': 'small compact smooth-coated breed of Asiatic origin having a tightly curled tail and broad flat wrinkled muzzle', 'name': 'pug-dog'}, {'frequency': 'c', 'synset': 'pumpkin.n.02', 'synonyms': ['pumpkin'], 'id': 854, 'def': 'usually large pulpy deep-yellow round fruit of the squash family maturing in late summer or early autumn', 'name': 'pumpkin'}, {'frequency': 'r', 'synset': 'punch.n.03', 'synonyms': ['puncher'], 'id': 855, 'def': 'a tool for 
making holes or indentations', 'name': 'puncher'}, {'frequency': 'r', 'synset': 'puppet.n.01', 'synonyms': ['puppet', 'marionette'], 'id': 856, 'def': 'a small figure of a person operated from above with strings by a puppeteer', 'name': 'puppet'}, {'frequency': 'c', 'synset': 'puppy.n.01', 'synonyms': ['puppy'], 'id': 857, 'def': 'a young dog', 'name': 'puppy'}, {'frequency': 'r', 'synset': 'quesadilla.n.01', 'synonyms': ['quesadilla'], 'id': 858, 'def': 'a tortilla that is filled with cheese and heated', 'name': 'quesadilla'}, {'frequency': 'r', 'synset': 'quiche.n.02', 'synonyms': ['quiche'], 'id': 859, 'def': 'a tart filled with rich unsweetened custard; often contains other ingredients (as cheese or ham or seafood or vegetables)', 'name': 'quiche'}, {'frequency': 'f', 'synset': 'quilt.n.01', 'synonyms': ['quilt', 'comforter'], 'id': 860, 'def': 'bedding made of two layers of cloth filled with stuffing and stitched together', 'name': 'quilt'}, {'frequency': 'c', 'synset': 'rabbit.n.01', 'synonyms': ['rabbit'], 'id': 861, 'def': 'any of various burrowing animals of the family Leporidae having long ears and short tails', 'name': 'rabbit'}, {'frequency': 'r', 'synset': 'racer.n.02', 'synonyms': ['race_car', 'racing_car'], 'id': 862, 'def': 'a fast car that competes in races', 'name': 'race_car'}, {'frequency': 'c', 'synset': 'racket.n.04', 'synonyms': ['racket', 'racquet'], 'id': 863, 'def': 'a sports implement used to strike a ball in various games', 'name': 'racket'}, {'frequency': 'r', 'synset': 'radar.n.01', 'synonyms': ['radar'], 'id': 864, 'def': 'measuring instrument in which the echo of a pulse of microwave radiation is used to detect and locate distant objects', 'name': 'radar'}, {'frequency': 'f', 'synset': 'radiator.n.03', 'synonyms': ['radiator'], 'id': 865, 'def': 'a mechanism consisting of a metal honeycomb through which hot fluids circulate', 'name': 'radiator'}, {'frequency': 'c', 'synset': 'radio_receiver.n.01', 'synonyms': ['radio_receiver', 'radio_set', 'radio', 'tuner_(radio)'], 'id': 866, 'def': 'an electronic receiver that detects and demodulates and amplifies transmitted radio signals', 'name': 'radio_receiver'}, {'frequency': 'c', 'synset': 'radish.n.03', 'synonyms': ['radish', 'daikon'], 'id': 867, 'def': 'pungent edible root of any of various cultivated radish plants', 'name': 'radish'}, {'frequency': 'c', 'synset': 'raft.n.01', 'synonyms': ['raft'], 'id': 868, 'def': 'a flat float (usually made of logs or planks) that can be used for transport or as a platform for swimmers', 'name': 'raft'}, {'frequency': 'r', 'synset': 'rag_doll.n.01', 'synonyms': ['rag_doll'], 'id': 869, 'def': 'a cloth doll that is stuffed and (usually) painted', 'name': 'rag_doll'}, {'frequency': 'c', 'synset': 'raincoat.n.01', 'synonyms': ['raincoat', 'waterproof_jacket'], 'id': 870, 'def': 'a water-resistant coat', 'name': 'raincoat'}, {'frequency': 'c', 'synset': 'ram.n.05', 'synonyms': ['ram_(animal)'], 'id': 871, 'def': 'uncastrated adult male sheep', 'name': 'ram_(animal)'}, {'frequency': 'c', 'synset': 'raspberry.n.02', 'synonyms': ['raspberry'], 'id': 872, 'def': 'red or black edible aggregate berries usually smaller than the related blackberries', 'name': 'raspberry'}, {'frequency': 'r', 'synset': 'rat.n.01', 'synonyms': ['rat'], 'id': 873, 'def': 'any of various long-tailed rodents similar to but larger than a mouse', 'name': 'rat'}, {'frequency': 'c', 'synset': 'razorblade.n.01', 'synonyms': ['razorblade'], 'id': 874, 'def': 'a blade that has a very sharp edge', 'name': 
'razorblade'}, {'frequency': 'c', 'synset': 'reamer.n.01', 'synonyms': ['reamer_(juicer)', 'juicer', 'juice_reamer'], 'id': 875, 'def': 'a squeezer with a conical ridged center that is used for squeezing juice from citrus fruit', 'name': 'reamer_(juicer)'}, {'frequency': 'f', 'synset': 'rearview_mirror.n.01', 'synonyms': ['rearview_mirror'], 'id': 876, 'def': 'vehicle mirror (side or rearview)', 'name': 'rearview_mirror'}, {'frequency': 'c', 'synset': 'receipt.n.02', 'synonyms': ['receipt'], 'id': 877, 'def': 'an acknowledgment (usually tangible) that payment has been made', 'name': 'receipt'}, {'frequency': 'c', 'synset': 'recliner.n.01', 'synonyms': ['recliner', 'reclining_chair', 'lounger_(chair)'], 'id': 878, 'def': 'an armchair whose back can be lowered and foot can be raised to allow the sitter to recline in it', 'name': 'recliner'}, {'frequency': 'c', 'synset': 'record_player.n.01', 'synonyms': ['record_player', 'phonograph_(record_player)', 'turntable'], 'id': 879, 'def': 'machine in which rotating records cause a stylus to vibrate and the vibrations are amplified acoustically or electronically', 'name': 'record_player'}, {'frequency': 'f', 'synset': 'reflector.n.01', 'synonyms': ['reflector'], 'id': 880, 'def': 'device that reflects light, radiation, etc.', 'name': 'reflector'}, {'frequency': 'f', 'synset': 'remote_control.n.01', 'synonyms': ['remote_control'], 'id': 881, 'def': 'a device that can be used to control a machine or apparatus from a distance', 'name': 'remote_control'}, {'frequency': 'c', 'synset': 'rhinoceros.n.01', 'synonyms': ['rhinoceros'], 'id': 882, 'def': 'massive powerful herbivorous odd-toed ungulate of southeast Asia and Africa having very thick skin and one or two horns on the snout', 'name': 'rhinoceros'}, {'frequency': 'r', 'synset': 'rib.n.03', 'synonyms': ['rib_(food)'], 'id': 883, 'def': 'cut of meat including one or more ribs', 'name': 'rib_(food)'}, {'frequency': 'c', 'synset': 'rifle.n.01', 'synonyms': ['rifle'], 'id': 884, 'def': 'a shoulder firearm with a long barrel', 'name': 'rifle'}, {'frequency': 'f', 'synset': 'ring.n.08', 'synonyms': ['ring'], 'id': 885, 'def': 'jewelry consisting of a circlet of precious metal (often set with jewels) worn on the finger', 'name': 'ring'}, {'frequency': 'r', 'synset': 'river_boat.n.01', 'synonyms': ['river_boat'], 'id': 886, 'def': 'a boat used on rivers or to ply a river', 'name': 'river_boat'}, {'frequency': 'r', 'synset': 'road_map.n.02', 'synonyms': ['road_map'], 'id': 887, 'def': '(NOT A ROAD) a MAP showing roads (for automobile travel)', 'name': 'road_map'}, {'frequency': 'c', 'synset': 'robe.n.01', 'synonyms': ['robe'], 'id': 888, 'def': 'any loose flowing garment', 'name': 'robe'}, {'frequency': 'c', 'synset': 'rocking_chair.n.01', 'synonyms': ['rocking_chair'], 'id': 889, 'def': 'a chair mounted on rockers', 'name': 'rocking_chair'}, {'frequency': 'r', 'synset': 'rodent.n.01', 'synonyms': ['rodent'], 'id': 890, 'def': 'relatively small placental mammals having a single pair of constantly growing incisor teeth specialized for gnawing', 'name': 'rodent'}, {'frequency': 'r', 'synset': 'roller_skate.n.01', 'synonyms': ['roller_skate'], 'id': 891, 'def': 'a shoe with pairs of rollers (small hard wheels) fixed to the sole', 'name': 'roller_skate'}, {'frequency': 'r', 'synset': 'rollerblade.n.01', 'synonyms': ['Rollerblade'], 'id': 892, 'def': 'an in-line variant of a roller skate', 'name': 'Rollerblade'}, {'frequency': 'c', 'synset': 'rolling_pin.n.01', 'synonyms': ['rolling_pin'], 'id': 893, 'def': 
'utensil consisting of a cylinder (usually of wood) with a handle at each end; used to roll out dough', 'name': 'rolling_pin'}, {'frequency': 'r', 'synset': 'root_beer.n.01', 'synonyms': ['root_beer'], 'id': 894, 'def': 'carbonated drink containing extracts of roots and herbs', 'name': 'root_beer'}, {'frequency': 'c', 'synset': 'router.n.02', 'synonyms': ['router_(computer_equipment)'], 'id': 895, 'def': 'a device that forwards data packets between computer networks', 'name': 'router_(computer_equipment)'}, {'frequency': 'f', 'synset': 'rubber_band.n.01', 'synonyms': ['rubber_band', 'elastic_band'], 'id': 896, 'def': 'a narrow band of elastic rubber used to hold things (such as papers) together', 'name': 'rubber_band'}, {'frequency': 'c', 'synset': 'runner.n.08', 'synonyms': ['runner_(carpet)'], 'id': 897, 'def': 'a long narrow carpet', 'name': 'runner_(carpet)'}, {'frequency': 'f', 'synset': 'sack.n.01', 'synonyms': ['plastic_bag', 'paper_bag'], 'id': 898, 'def': "a bag made of paper or plastic for holding customer's purchases", 'name': 'plastic_bag'}, {'frequency': 'f', 'synset': 'saddle.n.01', 'synonyms': ['saddle_(on_an_animal)'], 'id': 899, 'def': 'a seat for the rider of a horse or camel', 'name': 'saddle_(on_an_animal)'}, {'frequency': 'f', 'synset': 'saddle_blanket.n.01', 'synonyms': ['saddle_blanket', 'saddlecloth', 'horse_blanket'], 'id': 900, 'def': 'stable gear consisting of a blanket placed under the saddle', 'name': 'saddle_blanket'}, {'frequency': 'c', 'synset': 'saddlebag.n.01', 'synonyms': ['saddlebag'], 'id': 901, 'def': 'a large bag (or pair of bags) hung over a saddle', 'name': 'saddlebag'}, {'frequency': 'r', 'synset': 'safety_pin.n.01', 'synonyms': ['safety_pin'], 'id': 902, 'def': 'a pin in the form of a clasp; has a guard so the point of the pin will not stick the user', 'name': 'safety_pin'}, {'frequency': 'f', 'synset': 'sail.n.01', 'synonyms': ['sail'], 'id': 903, 'def': 'a large piece of fabric by means of which wind is used to propel a sailing vessel', 'name': 'sail'}, {'frequency': 'f', 'synset': 'salad.n.01', 'synonyms': ['salad'], 'id': 904, 'def': 'food mixtures either arranged on a plate or tossed and served with a moist dressing; usually consisting of or including greens', 'name': 'salad'}, {'frequency': 'r', 'synset': 'salad_plate.n.01', 'synonyms': ['salad_plate', 'salad_bowl'], 'id': 905, 'def': 'a plate or bowl for individual servings of salad', 'name': 'salad_plate'}, {'frequency': 'c', 'synset': 'salami.n.01', 'synonyms': ['salami'], 'id': 906, 'def': 'highly seasoned fatty sausage of pork and beef usually dried', 'name': 'salami'}, {'frequency': 'c', 'synset': 'salmon.n.01', 'synonyms': ['salmon_(fish)'], 'id': 907, 'def': 'any of various large food and game fishes of northern waters', 'name': 'salmon_(fish)'}, {'frequency': 'r', 'synset': 'salmon.n.03', 'synonyms': ['salmon_(food)'], 'id': 908, 'def': 'flesh of any of various marine or freshwater fish of the family Salmonidae', 'name': 'salmon_(food)'}, {'frequency': 'c', 'synset': 'salsa.n.01', 'synonyms': ['salsa'], 'id': 909, 'def': 'spicy sauce of tomatoes and onions and chili peppers to accompany Mexican foods', 'name': 'salsa'}, {'frequency': 'f', 'synset': 'saltshaker.n.01', 'synonyms': ['saltshaker'], 'id': 910, 'def': 'a shaker with a perforated top for sprinkling salt', 'name': 'saltshaker'}, {'frequency': 'f', 'synset': 'sandal.n.01', 'synonyms': ['sandal_(type_of_shoe)'], 'id': 911, 'def': 'a shoe consisting of a sole fastened by straps to the foot', 'name': 'sandal_(type_of_shoe)'}, 
{'frequency': 'f', 'synset': 'sandwich.n.01', 'synonyms': ['sandwich'], 'id': 912, 'def': 'two (or more) slices of bread with a filling between them', 'name': 'sandwich'}, {'frequency': 'r', 'synset': 'satchel.n.01', 'synonyms': ['satchel'], 'id': 913, 'def': 'luggage consisting of a small case with a flat bottom and (usually) a shoulder strap', 'name': 'satchel'}, {'frequency': 'r', 'synset': 'saucepan.n.01', 'synonyms': ['saucepan'], 'id': 914, 'def': 'a deep pan with a handle; used for stewing or boiling', 'name': 'saucepan'}, {'frequency': 'f', 'synset': 'saucer.n.02', 'synonyms': ['saucer'], 'id': 915, 'def': 'a small shallow dish for holding a cup at the table', 'name': 'saucer'}, {'frequency': 'f', 'synset': 'sausage.n.01', 'synonyms': ['sausage'], 'id': 916, 'def': 'highly seasoned minced meat stuffed in casings', 'name': 'sausage'}, {'frequency': 'r', 'synset': 'sawhorse.n.01', 'synonyms': ['sawhorse', 'sawbuck'], 'id': 917, 'def': 'a framework for holding wood that is being sawed', 'name': 'sawhorse'}, {'frequency': 'r', 'synset': 'sax.n.02', 'synonyms': ['saxophone'], 'id': 918, 'def': "a wind instrument with a `J'-shaped form typically made of brass", 'name': 'saxophone'}, {'frequency': 'f', 'synset': 'scale.n.07', 'synonyms': ['scale_(measuring_instrument)'], 'id': 919, 'def': 'a measuring instrument for weighing; shows amount of mass', 'name': 'scale_(measuring_instrument)'}, {'frequency': 'r', 'synset': 'scarecrow.n.01', 'synonyms': ['scarecrow', 'strawman'], 'id': 920, 'def': 'an effigy in the shape of a man to frighten birds away from seeds', 'name': 'scarecrow'}, {'frequency': 'f', 'synset': 'scarf.n.01', 'synonyms': ['scarf'], 'id': 921, 'def': 'a garment worn around the head or neck or shoulders for warmth or decoration', 'name': 'scarf'}, {'frequency': 'c', 'synset': 'school_bus.n.01', 'synonyms': ['school_bus'], 'id': 922, 'def': 'a bus used to transport children to or from school', 'name': 'school_bus'}, {'frequency': 'f', 'synset': 'scissors.n.01', 'synonyms': ['scissors'], 'id': 923, 'def': 'a tool having two crossed pivoting blades with looped handles', 'name': 'scissors'}, {'frequency': 'f', 'synset': 'scoreboard.n.01', 'synonyms': ['scoreboard'], 'id': 924, 'def': 'a large board for displaying the score of a contest (and some other information)', 'name': 'scoreboard'}, {'frequency': 'r', 'synset': 'scraper.n.01', 'synonyms': ['scraper'], 'id': 925, 'def': 'any of various hand tools for scraping', 'name': 'scraper'}, {'frequency': 'c', 'synset': 'screwdriver.n.01', 'synonyms': ['screwdriver'], 'id': 926, 'def': 'a hand tool for driving screws; has a tip that fits into the head of a screw', 'name': 'screwdriver'}, {'frequency': 'f', 'synset': 'scrub_brush.n.01', 'synonyms': ['scrubbing_brush'], 'id': 927, 'def': 'a brush with short stiff bristles for heavy cleaning', 'name': 'scrubbing_brush'}, {'frequency': 'c', 'synset': 'sculpture.n.01', 'synonyms': ['sculpture'], 'id': 928, 'def': 'a three-dimensional work of art', 'name': 'sculpture'}, {'frequency': 'c', 'synset': 'seabird.n.01', 'synonyms': ['seabird', 'seafowl'], 'id': 929, 'def': 'a bird that frequents coastal waters and the open ocean: gulls; pelicans; gannets; cormorants; albatrosses; petrels; etc.', 'name': 'seabird'}, {'frequency': 'c', 'synset': 'seahorse.n.02', 'synonyms': ['seahorse'], 'id': 930, 'def': 'small fish with horse-like heads bent sharply downward and curled tails', 'name': 'seahorse'}, {'frequency': 'r', 'synset': 'seaplane.n.01', 'synonyms': ['seaplane', 'hydroplane'], 'id': 931, 'def': 
'an airplane that can land on or take off from water', 'name': 'seaplane'}, {'frequency': 'c', 'synset': 'seashell.n.01', 'synonyms': ['seashell'], 'id': 932, 'def': 'the shell of a marine organism', 'name': 'seashell'}, {'frequency': 'c', 'synset': 'sewing_machine.n.01', 'synonyms': ['sewing_machine'], 'id': 933, 'def': 'a textile machine used as a home appliance for sewing', 'name': 'sewing_machine'}, {'frequency': 'c', 'synset': 'shaker.n.03', 'synonyms': ['shaker'], 'id': 934, 'def': 'a container in which something can be shaken', 'name': 'shaker'}, {'frequency': 'c', 'synset': 'shampoo.n.01', 'synonyms': ['shampoo'], 'id': 935, 'def': 'cleansing agent consisting of soaps or detergents used for washing the hair', 'name': 'shampoo'}, {'frequency': 'c', 'synset': 'shark.n.01', 'synonyms': ['shark'], 'id': 936, 'def': 'typically large carnivorous fishes with sharp teeth', 'name': 'shark'}, {'frequency': 'r', 'synset': 'sharpener.n.01', 'synonyms': ['sharpener'], 'id': 937, 'def': 'any implement that is used to make something (an edge or a point) sharper', 'name': 'sharpener'}, {'frequency': 'r', 'synset': 'sharpie.n.03', 'synonyms': ['Sharpie'], 'id': 938, 'def': 'a pen with indelible ink that will write on any surface', 'name': 'Sharpie'}, {'frequency': 'r', 'synset': 'shaver.n.03', 'synonyms': ['shaver_(electric)', 'electric_shaver', 'electric_razor'], 'id': 939, 'def': 'a razor powered by an electric motor', 'name': 'shaver_(electric)'}, {'frequency': 'c', 'synset': 'shaving_cream.n.01', 'synonyms': ['shaving_cream', 'shaving_soap'], 'id': 940, 'def': 'toiletry that forms a rich lather for softening the beard before shaving', 'name': 'shaving_cream'}, {'frequency': 'r', 'synset': 'shawl.n.01', 'synonyms': ['shawl'], 'id': 941, 'def': 'cloak consisting of an oblong piece of cloth used to cover the head and shoulders', 'name': 'shawl'}, {'frequency': 'r', 'synset': 'shears.n.01', 'synonyms': ['shears'], 'id': 942, 'def': 'large scissors with strong blades', 'name': 'shears'}, {'frequency': 'f', 'synset': 'sheep.n.01', 'synonyms': ['sheep'], 'id': 943, 'def': 'woolly usually horned ruminant mammal related to the goat', 'name': 'sheep'}, {'frequency': 'r', 'synset': 'shepherd_dog.n.01', 'synonyms': ['shepherd_dog', 'sheepdog'], 'id': 944, 'def': 'any of various usually long-haired breeds of dog reared to herd and guard sheep', 'name': 'shepherd_dog'}, {'frequency': 'r', 'synset': 'sherbert.n.01', 'synonyms': ['sherbert', 'sherbet'], 'id': 945, 'def': 'a frozen dessert made primarily of fruit juice and sugar', 'name': 'sherbert'}, {'frequency': 'c', 'synset': 'shield.n.02', 'synonyms': ['shield'], 'id': 946, 'def': 'armor carried on the arm to intercept blows', 'name': 'shield'}, {'frequency': 'f', 'synset': 'shirt.n.01', 'synonyms': ['shirt'], 'id': 947, 'def': 'a garment worn on the upper half of the body', 'name': 'shirt'}, {'frequency': 'f', 'synset': 'shoe.n.01', 'synonyms': ['shoe', 'sneaker_(type_of_shoe)', 'tennis_shoe'], 'id': 948, 'def': 'common footwear covering the foot', 'name': 'shoe'}, {'frequency': 'f', 'synset': 'shopping_bag.n.01', 'synonyms': ['shopping_bag'], 'id': 949, 'def': 'a bag made of plastic or strong paper (often with handles); used to transport goods after shopping', 'name': 'shopping_bag'}, {'frequency': 'c', 'synset': 'shopping_cart.n.01', 'synonyms': ['shopping_cart'], 'id': 950, 'def': 'a handcart that holds groceries or other goods while shopping', 'name': 'shopping_cart'}, {'frequency': 'f', 'synset': 'short_pants.n.01', 'synonyms': 
['short_pants', 'shorts_(clothing)', 'trunks_(clothing)'], 'id': 951, 'def': 'trousers that end at or above the knee', 'name': 'short_pants'}, {'frequency': 'r', 'synset': 'shot_glass.n.01', 'synonyms': ['shot_glass'], 'id': 952, 'def': 'a small glass adequate to hold a single swallow of whiskey', 'name': 'shot_glass'}, {'frequency': 'f', 'synset': 'shoulder_bag.n.01', 'synonyms': ['shoulder_bag'], 'id': 953, 'def': 'a large handbag that can be carried by a strap looped over the shoulder', 'name': 'shoulder_bag'}, {'frequency': 'c', 'synset': 'shovel.n.01', 'synonyms': ['shovel'], 'id': 954, 'def': 'a hand tool for lifting loose material such as snow, dirt, etc.', 'name': 'shovel'}, {'frequency': 'f', 'synset': 'shower.n.01', 'synonyms': ['shower_head'], 'id': 955, 'def': 'a plumbing fixture that sprays water over you', 'name': 'shower_head'}, {'frequency': 'r', 'synset': 'shower_cap.n.01', 'synonyms': ['shower_cap'], 'id': 956, 'def': 'a tight cap worn to keep hair dry while showering', 'name': 'shower_cap'}, {'frequency': 'f', 'synset': 'shower_curtain.n.01', 'synonyms': ['shower_curtain'], 'id': 957, 'def': 'a curtain that keeps water from splashing out of the shower area', 'name': 'shower_curtain'}, {'frequency': 'r', 'synset': 'shredder.n.01', 'synonyms': ['shredder_(for_paper)'], 'id': 958, 'def': 'a device that shreds documents', 'name': 'shredder_(for_paper)'}, {'frequency': 'f', 'synset': 'signboard.n.01', 'synonyms': ['signboard'], 'id': 959, 'def': 'structure displaying a board on which advertisements can be posted', 'name': 'signboard'}, {'frequency': 'c', 'synset': 'silo.n.01', 'synonyms': ['silo'], 'id': 960, 'def': 'a cylindrical tower used for storing goods', 'name': 'silo'}, {'frequency': 'f', 'synset': 'sink.n.01', 'synonyms': ['sink'], 'id': 961, 'def': 'plumbing fixture consisting of a water basin fixed to a wall or floor and having a drainpipe', 'name': 'sink'}, {'frequency': 'f', 'synset': 'skateboard.n.01', 'synonyms': ['skateboard'], 'id': 962, 'def': 'a board with wheels that is ridden in a standing or crouching position and propelled by foot', 'name': 'skateboard'}, {'frequency': 'c', 'synset': 'skewer.n.01', 'synonyms': ['skewer'], 'id': 963, 'def': 'a long pin for holding meat in position while it is being roasted', 'name': 'skewer'}, {'frequency': 'f', 'synset': 'ski.n.01', 'synonyms': ['ski'], 'id': 964, 'def': 'sports equipment for skiing on snow', 'name': 'ski'}, {'frequency': 'f', 'synset': 'ski_boot.n.01', 'synonyms': ['ski_boot'], 'id': 965, 'def': 'a stiff boot that is fastened to a ski with a ski binding', 'name': 'ski_boot'}, {'frequency': 'f', 'synset': 'ski_parka.n.01', 'synonyms': ['ski_parka', 'ski_jacket'], 'id': 966, 'def': 'a parka to be worn while skiing', 'name': 'ski_parka'}, {'frequency': 'f', 'synset': 'ski_pole.n.01', 'synonyms': ['ski_pole'], 'id': 967, 'def': 'a pole with metal points used as an aid in skiing', 'name': 'ski_pole'}, {'frequency': 'f', 'synset': 'skirt.n.02', 'synonyms': ['skirt'], 'id': 968, 'def': 'a garment hanging from the waist; worn mainly by girls and women', 'name': 'skirt'}, {'frequency': 'r', 'synset': 'skullcap.n.01', 'synonyms': ['skullcap'], 'id': 969, 'def': 'rounded brimless cap fitting the crown of the head', 'name': 'skullcap'}, {'frequency': 'c', 'synset': 'sled.n.01', 'synonyms': ['sled', 'sledge', 'sleigh'], 'id': 970, 'def': 'a vehicle or flat object for transportation over snow by sliding or pulled by dogs, etc.', 'name': 'sled'}, {'frequency': 'c', 'synset': 'sleeping_bag.n.01', 'synonyms': 
['sleeping_bag'], 'id': 971, 'def': 'large padded bag designed to be slept in outdoors', 'name': 'sleeping_bag'}, {'frequency': 'r', 'synset': 'sling.n.05', 'synonyms': ['sling_(bandage)', 'triangular_bandage'], 'id': 972, 'def': 'bandage to support an injured forearm; slung over the shoulder or neck', 'name': 'sling_(bandage)'}, {'frequency': 'c', 'synset': 'slipper.n.01', 'synonyms': ['slipper_(footwear)', 'carpet_slipper_(footwear)'], 'id': 973, 'def': 'low footwear that can be slipped on and off easily; usually worn indoors', 'name': 'slipper_(footwear)'}, {'frequency': 'r', 'synset': 'smoothie.n.02', 'synonyms': ['smoothie'], 'id': 974, 'def': 'a thick smooth drink consisting of fresh fruit pureed with ice cream or yoghurt or milk', 'name': 'smoothie'}, {'frequency': 'r', 'synset': 'snake.n.01', 'synonyms': ['snake', 'serpent'], 'id': 975, 'def': 'limbless scaly elongate reptile; some are venomous', 'name': 'snake'}, {'frequency': 'f', 'synset': 'snowboard.n.01', 'synonyms': ['snowboard'], 'id': 976, 'def': 'a board that resembles a broad ski or a small surfboard; used in a standing position to slide down snow-covered slopes', 'name': 'snowboard'}, {'frequency': 'c', 'synset': 'snowman.n.01', 'synonyms': ['snowman'], 'id': 977, 'def': 'a figure of a person made of packed snow', 'name': 'snowman'}, {'frequency': 'c', 'synset': 'snowmobile.n.01', 'synonyms': ['snowmobile'], 'id': 978, 'def': 'tracked vehicle for travel on snow having skis in front', 'name': 'snowmobile'}, {'frequency': 'f', 'synset': 'soap.n.01', 'synonyms': ['soap'], 'id': 979, 'def': 'a cleansing agent made from the salts of vegetable or animal fats', 'name': 'soap'}, {'frequency': 'f', 'synset': 'soccer_ball.n.01', 'synonyms': ['soccer_ball'], 'id': 980, 'def': "an inflated ball used in playing soccer (called `football' outside of the United States)", 'name': 'soccer_ball'}, {'frequency': 'f', 'synset': 'sock.n.01', 'synonyms': ['sock'], 'id': 981, 'def': 'cloth covering for the foot; worn inside the shoe; reaches to between the ankle and the knee', 'name': 'sock'}, {'frequency': 'f', 'synset': 'sofa.n.01', 'synonyms': ['sofa', 'couch', 'lounge'], 'id': 982, 'def': 'an upholstered seat for more than one person', 'name': 'sofa'}, {'frequency': 'r', 'synset': 'softball.n.01', 'synonyms': ['softball'], 'id': 983, 'def': 'ball used in playing softball', 'name': 'softball'}, {'frequency': 'c', 'synset': 'solar_array.n.01', 'synonyms': ['solar_array', 'solar_battery', 'solar_panel'], 'id': 984, 'def': 'electrical device consisting of a large array of connected solar cells', 'name': 'solar_array'}, {'frequency': 'r', 'synset': 'sombrero.n.02', 'synonyms': ['sombrero'], 'id': 985, 'def': 'a straw hat with a tall crown and broad brim; worn in American southwest and in Mexico', 'name': 'sombrero'}, {'frequency': 'f', 'synset': 'soup.n.01', 'synonyms': ['soup'], 'id': 986, 'def': 'liquid food especially of meat or fish or vegetable stock often containing pieces of solid food', 'name': 'soup'}, {'frequency': 'r', 'synset': 'soup_bowl.n.01', 'synonyms': ['soup_bowl'], 'id': 987, 'def': 'a bowl for serving soup', 'name': 'soup_bowl'}, {'frequency': 'c', 'synset': 'soupspoon.n.01', 'synonyms': ['soupspoon'], 'id': 988, 'def': 'a spoon with a rounded bowl for eating soup', 'name': 'soupspoon'}, {'frequency': 'c', 'synset': 'sour_cream.n.01', 'synonyms': ['sour_cream', 'soured_cream'], 'id': 989, 'def': 'soured light cream', 'name': 'sour_cream'}, {'frequency': 'r', 'synset': 'soya_milk.n.01', 'synonyms': ['soya_milk', 
'soybean_milk', 'soymilk'], 'id': 990, 'def': 'a milk substitute containing soybean flour and water; used in some infant formulas and in making tofu', 'name': 'soya_milk'}, {'frequency': 'r', 'synset': 'space_shuttle.n.01', 'synonyms': ['space_shuttle'], 'id': 991, 'def': "a reusable spacecraft with wings for a controlled descent through the Earth's atmosphere", 'name': 'space_shuttle'}, {'frequency': 'r', 'synset': 'sparkler.n.02', 'synonyms': ['sparkler_(fireworks)'], 'id': 992, 'def': 'a firework that burns slowly and throws out a shower of sparks', 'name': 'sparkler_(fireworks)'}, {'frequency': 'f', 'synset': 'spatula.n.02', 'synonyms': ['spatula'], 'id': 993, 'def': 'a hand tool with a thin flexible blade used to mix or spread soft substances', 'name': 'spatula'}, {'frequency': 'r', 'synset': 'spear.n.01', 'synonyms': ['spear', 'lance'], 'id': 994, 'def': 'a long pointed rod used as a tool or weapon', 'name': 'spear'}, {'frequency': 'f', 'synset': 'spectacles.n.01', 'synonyms': ['spectacles', 'specs', 'eyeglasses', 'glasses'], 'id': 995, 'def': 'optical instrument consisting of a frame that holds a pair of lenses for correcting defective vision', 'name': 'spectacles'}, {'frequency': 'c', 'synset': 'spice_rack.n.01', 'synonyms': ['spice_rack'], 'id': 996, 'def': 'a rack for displaying containers filled with spices', 'name': 'spice_rack'}, {'frequency': 'c', 'synset': 'spider.n.01', 'synonyms': ['spider'], 'id': 997, 'def': 'predatory arachnid with eight legs, two poison fangs, two feelers, and usually two silk-spinning organs at the back end of the body', 'name': 'spider'}, {'frequency': 'r', 'synset': 'spiny_lobster.n.02', 'synonyms': ['crawfish', 'crayfish'], 'id': 998, 'def': 'large edible marine crustacean having a spiny carapace but lacking the large pincers of true lobsters', 'name': 'crawfish'}, {'frequency': 'c', 'synset': 'sponge.n.01', 'synonyms': ['sponge'], 'id': 999, 'def': 'a porous mass usable to absorb water typically used for cleaning', 'name': 'sponge'}, {'frequency': 'f', 'synset': 'spoon.n.01', 'synonyms': ['spoon'], 'id': 1000, 'def': 'a piece of cutlery with a shallow bowl-shaped container and a handle', 'name': 'spoon'}, {'frequency': 'c', 'synset': 'sportswear.n.01', 'synonyms': ['sportswear', 'athletic_wear', 'activewear'], 'id': 1001, 'def': 'attire worn for sport or for casual wear', 'name': 'sportswear'}, {'frequency': 'c', 'synset': 'spotlight.n.02', 'synonyms': ['spotlight'], 'id': 1002, 'def': 'a lamp that produces a strong beam of light to illuminate a restricted area; used to focus attention on a stage performer', 'name': 'spotlight'}, {'frequency': 'r', 'synset': 'squid.n.01', 'synonyms': ['squid_(food)', 'calamari', 'calamary'], 'id': 1003, 'def': '(Italian cuisine) squid prepared as food', 'name': 'squid_(food)'}, {'frequency': 'c', 'synset': 'squirrel.n.01', 'synonyms': ['squirrel'], 'id': 1004, 'def': 'a kind of arboreal rodent having a long bushy tail', 'name': 'squirrel'}, {'frequency': 'r', 'synset': 'stagecoach.n.01', 'synonyms': ['stagecoach'], 'id': 1005, 'def': 'a large coach-and-four formerly used to carry passengers and mail on regular routes between towns', 'name': 'stagecoach'}, {'frequency': 'c', 'synset': 'stapler.n.01', 'synonyms': ['stapler_(stapling_machine)'], 'id': 1006, 'def': 'a machine that inserts staples into sheets of paper in order to fasten them together', 'name': 'stapler_(stapling_machine)'}, {'frequency': 'c', 'synset': 'starfish.n.01', 'synonyms': ['starfish', 'sea_star'], 'id': 1007, 'def': 'echinoderms characterized 
by five arms extending from a central disk', 'name': 'starfish'}, {'frequency': 'f', 'synset': 'statue.n.01', 'synonyms': ['statue_(sculpture)'], 'id': 1008, 'def': 'a sculpture representing a human or animal', 'name': 'statue_(sculpture)'}, {'frequency': 'c', 'synset': 'steak.n.01', 'synonyms': ['steak_(food)'], 'id': 1009, 'def': 'a slice of meat cut from the fleshy part of an animal or large fish', 'name': 'steak_(food)'}, {'frequency': 'r', 'synset': 'steak_knife.n.01', 'synonyms': ['steak_knife'], 'id': 1010, 'def': 'a sharp table knife used in eating steak', 'name': 'steak_knife'}, {'frequency': 'f', 'synset': 'steering_wheel.n.01', 'synonyms': ['steering_wheel'], 'id': 1011, 'def': 'a handwheel that is used for steering', 'name': 'steering_wheel'}, {'frequency': 'r', 'synset': 'step_ladder.n.01', 'synonyms': ['stepladder'], 'id': 1012, 'def': 'a folding portable ladder hinged at the top', 'name': 'stepladder'}, {'frequency': 'c', 'synset': 'step_stool.n.01', 'synonyms': ['step_stool'], 'id': 1013, 'def': 'a stool that has one or two steps that fold under the seat', 'name': 'step_stool'}, {'frequency': 'c', 'synset': 'stereo.n.01', 'synonyms': ['stereo_(sound_system)'], 'id': 1014, 'def': 'electronic device for playing audio', 'name': 'stereo_(sound_system)'}, {'frequency': 'r', 'synset': 'stew.n.02', 'synonyms': ['stew'], 'id': 1015, 'def': 'food prepared by stewing especially meat or fish with vegetables', 'name': 'stew'}, {'frequency': 'r', 'synset': 'stirrer.n.02', 'synonyms': ['stirrer'], 'id': 1016, 'def': 'an implement used for stirring', 'name': 'stirrer'}, {'frequency': 'f', 'synset': 'stirrup.n.01', 'synonyms': ['stirrup'], 'id': 1017, 'def': "support consisting of metal loops into which rider's feet go", 'name': 'stirrup'}, {'frequency': 'f', 'synset': 'stool.n.01', 'synonyms': ['stool'], 'id': 1018, 'def': 'a simple seat without a back or arms', 'name': 'stool'}, {'frequency': 'f', 'synset': 'stop_sign.n.01', 'synonyms': ['stop_sign'], 'id': 1019, 'def': 'a traffic sign to notify drivers that they must come to a complete stop', 'name': 'stop_sign'}, {'frequency': 'f', 'synset': 'stoplight.n.01', 'synonyms': ['brake_light'], 'id': 1020, 'def': 'a red light on the rear of a motor vehicle that signals when the brakes are applied', 'name': 'brake_light'}, {'frequency': 'f', 'synset': 'stove.n.01', 'synonyms': ['stove', 'kitchen_stove', 'range_(kitchen_appliance)', 'kitchen_range', 'cooking_stove'], 'id': 1021, 'def': 'a kitchen appliance used for cooking food', 'name': 'stove'}, {'frequency': 'c', 'synset': 'strainer.n.01', 'synonyms': ['strainer'], 'id': 1022, 'def': 'a filter to retain larger pieces while smaller pieces and liquids pass through', 'name': 'strainer'}, {'frequency': 'f', 'synset': 'strap.n.01', 'synonyms': ['strap'], 'id': 1023, 'def': 'an elongated strip of material for binding things together or holding', 'name': 'strap'}, {'frequency': 'f', 'synset': 'straw.n.04', 'synonyms': ['straw_(for_drinking)', 'drinking_straw'], 'id': 1024, 'def': 'a thin paper or plastic tube used to suck liquids into the mouth', 'name': 'straw_(for_drinking)'}, {'frequency': 'f', 'synset': 'strawberry.n.01', 'synonyms': ['strawberry'], 'id': 1025, 'def': 'sweet fleshy red fruit', 'name': 'strawberry'}, {'frequency': 'f', 'synset': 'street_sign.n.01', 'synonyms': ['street_sign'], 'id': 1026, 'def': 'a sign visible from the street', 'name': 'street_sign'}, {'frequency': 'f', 'synset': 'streetlight.n.01', 'synonyms': ['streetlight', 'street_lamp'], 'id': 1027, 'def': 'a lamp 
supported on a lamppost; for illuminating a street', 'name': 'streetlight'}, {'frequency': 'r', 'synset': 'string_cheese.n.01', 'synonyms': ['string_cheese'], 'id': 1028, 'def': 'cheese formed in long strings twisted together', 'name': 'string_cheese'}, {'frequency': 'r', 'synset': 'stylus.n.02', 'synonyms': ['stylus'], 'id': 1029, 'def': 'a pointed tool for writing or drawing or engraving, including pens', 'name': 'stylus'}, {'frequency': 'r', 'synset': 'subwoofer.n.01', 'synonyms': ['subwoofer'], 'id': 1030, 'def': 'a loudspeaker that is designed to reproduce very low bass frequencies', 'name': 'subwoofer'}, {'frequency': 'r', 'synset': 'sugar_bowl.n.01', 'synonyms': ['sugar_bowl'], 'id': 1031, 'def': 'a dish in which sugar is served', 'name': 'sugar_bowl'}, {'frequency': 'r', 'synset': 'sugarcane.n.01', 'synonyms': ['sugarcane_(plant)'], 'id': 1032, 'def': 'juicy canes whose sap is a source of molasses and commercial sugar; fresh canes are sometimes chewed for the juice', 'name': 'sugarcane_(plant)'}, {'frequency': 'f', 'synset': 'suit.n.01', 'synonyms': ['suit_(clothing)'], 'id': 1033, 'def': 'a set of garments (usually including a jacket and trousers or skirt) for outerwear all of the same fabric and color', 'name': 'suit_(clothing)'}, {'frequency': 'c', 'synset': 'sunflower.n.01', 'synonyms': ['sunflower'], 'id': 1034, 'def': 'any plant of the genus Helianthus having large flower heads with dark disk florets and showy yellow rays', 'name': 'sunflower'}, {'frequency': 'f', 'synset': 'sunglasses.n.01', 'synonyms': ['sunglasses'], 'id': 1035, 'def': 'spectacles that are darkened or polarized to protect the eyes from the glare of the sun', 'name': 'sunglasses'}, {'frequency': 'c', 'synset': 'sunhat.n.01', 'synonyms': ['sunhat'], 'id': 1036, 'def': 'a hat with a broad brim that protects the face from direct exposure to the sun', 'name': 'sunhat'}, {'frequency': 'f', 'synset': 'surfboard.n.01', 'synonyms': ['surfboard'], 'id': 1037, 'def': 'a narrow buoyant board for riding surf', 'name': 'surfboard'}, {'frequency': 'c', 'synset': 'sushi.n.01', 'synonyms': ['sushi'], 'id': 1038, 'def': 'rice (with raw fish) wrapped in seaweed', 'name': 'sushi'}, {'frequency': 'c', 'synset': 'swab.n.02', 'synonyms': ['mop'], 'id': 1039, 'def': 'cleaning implement consisting of absorbent material fastened to a handle; for cleaning floors', 'name': 'mop'}, {'frequency': 'c', 'synset': 'sweat_pants.n.01', 'synonyms': ['sweat_pants'], 'id': 1040, 'def': 'loose-fitting trousers with elastic cuffs; worn by athletes', 'name': 'sweat_pants'}, {'frequency': 'c', 'synset': 'sweatband.n.02', 'synonyms': ['sweatband'], 'id': 1041, 'def': 'a band of material tied around the forehead or wrist to absorb sweat', 'name': 'sweatband'}, {'frequency': 'f', 'synset': 'sweater.n.01', 'synonyms': ['sweater'], 'id': 1042, 'def': 'a crocheted or knitted garment covering the upper part of the body', 'name': 'sweater'}, {'frequency': 'f', 'synset': 'sweatshirt.n.01', 'synonyms': ['sweatshirt'], 'id': 1043, 'def': 'cotton knit pullover with long sleeves worn during athletic activity', 'name': 'sweatshirt'}, {'frequency': 'c', 'synset': 'sweet_potato.n.02', 'synonyms': ['sweet_potato'], 'id': 1044, 'def': 'the edible tuberous root of the sweet potato vine', 'name': 'sweet_potato'}, {'frequency': 'f', 'synset': 'swimsuit.n.01', 'synonyms': ['swimsuit', 'swimwear', 'bathing_suit', 'swimming_costume', 'bathing_costume', 'swimming_trunks', 'bathing_trunks'], 'id': 1045, 'def': 'garment worn for swimming', 'name': 'swimsuit'}, {'frequency': 
'c', 'synset': 'sword.n.01', 'synonyms': ['sword'], 'id': 1046, 'def': 'a cutting or thrusting weapon that has a long metal blade', 'name': 'sword'}, {'frequency': 'r', 'synset': 'syringe.n.01', 'synonyms': ['syringe'], 'id': 1047, 'def': 'a medical instrument used to inject or withdraw fluids', 'name': 'syringe'}, {'frequency': 'r', 'synset': 'tabasco.n.02', 'synonyms': ['Tabasco_sauce'], 'id': 1048, 'def': 'very spicy sauce (trade name Tabasco) made from fully-aged red peppers', 'name': 'Tabasco_sauce'}, {'frequency': 'r', 'synset': 'table-tennis_table.n.01', 'synonyms': ['table-tennis_table', 'ping-pong_table'], 'id': 1049, 'def': 'a table used for playing table tennis', 'name': 'table-tennis_table'}, {'frequency': 'f', 'synset': 'table.n.02', 'synonyms': ['table'], 'id': 1050, 'def': 'a piece of furniture having a smooth flat top that is usually supported by one or more vertical legs', 'name': 'table'}, {'frequency': 'c', 'synset': 'table_lamp.n.01', 'synonyms': ['table_lamp'], 'id': 1051, 'def': 'a lamp that sits on a table', 'name': 'table_lamp'}, {'frequency': 'f', 'synset': 'tablecloth.n.01', 'synonyms': ['tablecloth'], 'id': 1052, 'def': 'a covering spread over a dining table', 'name': 'tablecloth'}, {'frequency': 'r', 'synset': 'tachometer.n.01', 'synonyms': ['tachometer'], 'id': 1053, 'def': 'measuring instrument for indicating speed of rotation', 'name': 'tachometer'}, {'frequency': 'r', 'synset': 'taco.n.02', 'synonyms': ['taco'], 'id': 1054, 'def': 'a small tortilla cupped around a filling', 'name': 'taco'}, {'frequency': 'f', 'synset': 'tag.n.02', 'synonyms': ['tag'], 'id': 1055, 'def': 'a label associated with something for the purpose of identification or information', 'name': 'tag'}, {'frequency': 'f', 'synset': 'taillight.n.01', 'synonyms': ['taillight', 'rear_light'], 'id': 1056, 'def': 'lamp (usually red) mounted at the rear of a motor vehicle', 'name': 'taillight'}, {'frequency': 'r', 'synset': 'tambourine.n.01', 'synonyms': ['tambourine'], 'id': 1057, 'def': 'a shallow drum with a single drumhead and with metallic disks in the sides', 'name': 'tambourine'}, {'frequency': 'r', 'synset': 'tank.n.01', 'synonyms': ['army_tank', 'armored_combat_vehicle', 'armoured_combat_vehicle'], 'id': 1058, 'def': 'an enclosed armored military vehicle; has a cannon and moves on caterpillar treads', 'name': 'army_tank'}, {'frequency': 'f', 'synset': 'tank.n.02', 'synonyms': ['tank_(storage_vessel)', 'storage_tank'], 'id': 1059, 'def': 'a large (usually metallic) vessel for holding gases or liquids', 'name': 'tank_(storage_vessel)'}, {'frequency': 'f', 'synset': 'tank_top.n.01', 'synonyms': ['tank_top_(clothing)'], 'id': 1060, 'def': 'a tight-fitting sleeveless shirt with wide shoulder straps and low neck and no front opening', 'name': 'tank_top_(clothing)'}, {'frequency': 'f', 'synset': 'tape.n.01', 'synonyms': ['tape_(sticky_cloth_or_paper)'], 'id': 1061, 'def': 'a long thin piece of cloth or paper as used for binding or fastening', 'name': 'tape_(sticky_cloth_or_paper)'}, {'frequency': 'c', 'synset': 'tape.n.04', 'synonyms': ['tape_measure', 'measuring_tape'], 'id': 1062, 'def': 'measuring instrument consisting of a narrow strip (cloth or metal) marked in inches or centimeters and used for measuring lengths', 'name': 'tape_measure'}, {'frequency': 'c', 'synset': 'tapestry.n.02', 'synonyms': ['tapestry'], 'id': 1063, 'def': 'a heavy textile with a woven design; used for curtains and upholstery', 'name': 'tapestry'}, {'frequency': 'f', 'synset': 'tarpaulin.n.01', 'synonyms': ['tarp'], 
'id': 1064, 'def': 'waterproofed canvas', 'name': 'tarp'}, {'frequency': 'c', 'synset': 'tartan.n.01', 'synonyms': ['tartan', 'plaid'], 'id': 1065, 'def': 'a cloth having a crisscross design', 'name': 'tartan'}, {'frequency': 'c', 'synset': 'tassel.n.01', 'synonyms': ['tassel'], 'id': 1066, 'def': 'adornment consisting of a bunch of cords fastened at one end', 'name': 'tassel'}, {'frequency': 'c', 'synset': 'tea_bag.n.01', 'synonyms': ['tea_bag'], 'id': 1067, 'def': 'a measured amount of tea in a bag for an individual serving of tea', 'name': 'tea_bag'}, {'frequency': 'c', 'synset': 'teacup.n.02', 'synonyms': ['teacup'], 'id': 1068, 'def': 'a cup from which tea is drunk', 'name': 'teacup'}, {'frequency': 'c', 'synset': 'teakettle.n.01', 'synonyms': ['teakettle'], 'id': 1069, 'def': 'kettle for boiling water to make tea', 'name': 'teakettle'}, {'frequency': 'f', 'synset': 'teapot.n.01', 'synonyms': ['teapot'], 'id': 1070, 'def': 'pot for brewing tea; usually has a spout and handle', 'name': 'teapot'}, {'frequency': 'f', 'synset': 'teddy.n.01', 'synonyms': ['teddy_bear'], 'id': 1071, 'def': "plaything consisting of a child's toy bear (usually plush and stuffed with soft materials)", 'name': 'teddy_bear'}, {'frequency': 'f', 'synset': 'telephone.n.01', 'synonyms': ['telephone', 'phone', 'telephone_set'], 'id': 1072, 'def': 'electronic device for communicating by voice over long distances (includes wired and wireless/cell phones)', 'name': 'telephone'}, {'frequency': 'c', 'synset': 'telephone_booth.n.01', 'synonyms': ['telephone_booth', 'phone_booth', 'call_box', 'telephone_box', 'telephone_kiosk'], 'id': 1073, 'def': 'booth for using a telephone', 'name': 'telephone_booth'}, {'frequency': 'f', 'synset': 'telephone_pole.n.01', 'synonyms': ['telephone_pole', 'telegraph_pole', 'telegraph_post'], 'id': 1074, 'def': 'tall pole supporting telephone wires', 'name': 'telephone_pole'}, {'frequency': 'r', 'synset': 'telephoto_lens.n.01', 'synonyms': ['telephoto_lens', 'zoom_lens'], 'id': 1075, 'def': 'a camera lens that magnifies the image', 'name': 'telephoto_lens'}, {'frequency': 'c', 'synset': 'television_camera.n.01', 'synonyms': ['television_camera', 'tv_camera'], 'id': 1076, 'def': 'television equipment for capturing and recording video', 'name': 'television_camera'}, {'frequency': 'f', 'synset': 'television_receiver.n.01', 'synonyms': ['television_set', 'tv', 'tv_set'], 'id': 1077, 'def': 'an electronic device that receives television signals and displays them on a screen', 'name': 'television_set'}, {'frequency': 'f', 'synset': 'tennis_ball.n.01', 'synonyms': ['tennis_ball'], 'id': 1078, 'def': 'ball about the size of a fist used in playing tennis', 'name': 'tennis_ball'}, {'frequency': 'f', 'synset': 'tennis_racket.n.01', 'synonyms': ['tennis_racket'], 'id': 1079, 'def': 'a racket used to play tennis', 'name': 'tennis_racket'}, {'frequency': 'r', 'synset': 'tequila.n.01', 'synonyms': ['tequila'], 'id': 1080, 'def': 'Mexican liquor made from fermented juices of an agave plant', 'name': 'tequila'}, {'frequency': 'c', 'synset': 'thermometer.n.01', 'synonyms': ['thermometer'], 'id': 1081, 'def': 'measuring instrument for measuring temperature', 'name': 'thermometer'}, {'frequency': 'c', 'synset': 'thermos.n.01', 'synonyms': ['thermos_bottle'], 'id': 1082, 'def': 'vacuum flask that preserves temperature of hot or cold drinks', 'name': 'thermos_bottle'}, {'frequency': 'f', 'synset': 'thermostat.n.01', 'synonyms': ['thermostat'], 'id': 1083, 'def': 'a regulator for automatically regulating 
temperature by starting or stopping the supply of heat', 'name': 'thermostat'}, {'frequency': 'r', 'synset': 'thimble.n.02', 'synonyms': ['thimble'], 'id': 1084, 'def': 'a small metal cap to protect the finger while sewing; can be used as a small container', 'name': 'thimble'}, {'frequency': 'c', 'synset': 'thread.n.01', 'synonyms': ['thread', 'yarn'], 'id': 1085, 'def': 'a fine cord of twisted fibers (of cotton or silk or wool or nylon etc.) used in sewing and weaving', 'name': 'thread'}, {'frequency': 'c', 'synset': 'thumbtack.n.01', 'synonyms': ['thumbtack', 'drawing_pin', 'pushpin'], 'id': 1086, 'def': 'a tack for attaching papers to a bulletin board or drawing board', 'name': 'thumbtack'}, {'frequency': 'c', 'synset': 'tiara.n.01', 'synonyms': ['tiara'], 'id': 1087, 'def': 'a jeweled headdress worn by women on formal occasions', 'name': 'tiara'}, {'frequency': 'c', 'synset': 'tiger.n.02', 'synonyms': ['tiger'], 'id': 1088, 'def': 'large feline of forests in most of Asia having a tawny coat with black stripes', 'name': 'tiger'}, {'frequency': 'c', 'synset': 'tights.n.01', 'synonyms': ['tights_(clothing)', 'leotards'], 'id': 1089, 'def': 'skintight knit hose covering the body from the waist to the feet worn by acrobats and dancers and as stockings by women and girls', 'name': 'tights_(clothing)'}, {'frequency': 'c', 'synset': 'timer.n.01', 'synonyms': ['timer', 'stopwatch'], 'id': 1090, 'def': 'a timepiece that measures a time interval and signals its end', 'name': 'timer'}, {'frequency': 'f', 'synset': 'tinfoil.n.01', 'synonyms': ['tinfoil'], 'id': 1091, 'def': 'foil made of tin or an alloy of tin and lead', 'name': 'tinfoil'}, {'frequency': 'c', 'synset': 'tinsel.n.01', 'synonyms': ['tinsel'], 'id': 1092, 'def': 'a showy decoration that is basically valueless', 'name': 'tinsel'}, {'frequency': 'f', 'synset': 'tissue.n.02', 'synonyms': ['tissue_paper'], 'id': 1093, 'def': 'a soft thin (usually translucent) paper', 'name': 'tissue_paper'}, {'frequency': 'c', 'synset': 'toast.n.01', 'synonyms': ['toast_(food)'], 'id': 1094, 'def': 'slice of bread that has been toasted', 'name': 'toast_(food)'}, {'frequency': 'f', 'synset': 'toaster.n.02', 'synonyms': ['toaster'], 'id': 1095, 'def': 'a kitchen appliance (usually electric) for toasting bread', 'name': 'toaster'}, {'frequency': 'f', 'synset': 'toaster_oven.n.01', 'synonyms': ['toaster_oven'], 'id': 1096, 'def': 'kitchen appliance consisting of a small electric oven for toasting or warming food', 'name': 'toaster_oven'}, {'frequency': 'f', 'synset': 'toilet.n.02', 'synonyms': ['toilet'], 'id': 1097, 'def': 'a plumbing fixture for defecation and urination', 'name': 'toilet'}, {'frequency': 'f', 'synset': 'toilet_tissue.n.01', 'synonyms': ['toilet_tissue', 'toilet_paper', 'bathroom_tissue'], 'id': 1098, 'def': 'a soft thin absorbent paper for use in toilets', 'name': 'toilet_tissue'}, {'frequency': 'f', 'synset': 'tomato.n.01', 'synonyms': ['tomato'], 'id': 1099, 'def': 'mildly acid red or yellow pulpy fruit eaten as a vegetable', 'name': 'tomato'}, {'frequency': 'f', 'synset': 'tongs.n.01', 'synonyms': ['tongs'], 'id': 1100, 'def': 'any of various devices for taking hold of objects; usually have two hinged legs with handles above and pointed hooks below', 'name': 'tongs'}, {'frequency': 'c', 'synset': 'toolbox.n.01', 'synonyms': ['toolbox'], 'id': 1101, 'def': 'a box or chest or cabinet for holding hand tools', 'name': 'toolbox'}, {'frequency': 'f', 'synset': 'toothbrush.n.01', 'synonyms': ['toothbrush'], 'id': 1102, 'def': 'small brush; has 
long handle; used to clean teeth', 'name': 'toothbrush'}, {'frequency': 'f', 'synset': 'toothpaste.n.01', 'synonyms': ['toothpaste'], 'id': 1103, 'def': 'a dentifrice in the form of a paste', 'name': 'toothpaste'}, {'frequency': 'f', 'synset': 'toothpick.n.01', 'synonyms': ['toothpick'], 'id': 1104, 'def': 'pick consisting of a small strip of wood or plastic; used to pick food from between the teeth', 'name': 'toothpick'}, {'frequency': 'f', 'synset': 'top.n.09', 'synonyms': ['cover'], 'id': 1105, 'def': 'covering for a hole (especially a hole in the top of a container)', 'name': 'cover'}, {'frequency': 'c', 'synset': 'tortilla.n.01', 'synonyms': ['tortilla'], 'id': 1106, 'def': 'thin unleavened pancake made from cornmeal or wheat flour', 'name': 'tortilla'}, {'frequency': 'c', 'synset': 'tow_truck.n.01', 'synonyms': ['tow_truck'], 'id': 1107, 'def': 'a truck equipped to hoist and pull wrecked cars (or to remove cars from no-parking zones)', 'name': 'tow_truck'}, {'frequency': 'f', 'synset': 'towel.n.01', 'synonyms': ['towel'], 'id': 1108, 'def': 'a rectangular piece of absorbent cloth (or paper) for drying or wiping', 'name': 'towel'}, {'frequency': 'f', 'synset': 'towel_rack.n.01', 'synonyms': ['towel_rack', 'towel_rail', 'towel_bar'], 'id': 1109, 'def': 'a rack consisting of one or more bars on which towels can be hung', 'name': 'towel_rack'}, {'frequency': 'f', 'synset': 'toy.n.03', 'synonyms': ['toy'], 'id': 1110, 'def': 'a device regarded as providing amusement', 'name': 'toy'}, {'frequency': 'c', 'synset': 'tractor.n.01', 'synonyms': ['tractor_(farm_equipment)'], 'id': 1111, 'def': 'a wheeled vehicle with large wheels; used in farming and other applications', 'name': 'tractor_(farm_equipment)'}, {'frequency': 'f', 'synset': 'traffic_light.n.01', 'synonyms': ['traffic_light'], 'id': 1112, 'def': 'a device to control vehicle traffic often consisting of three or more lights', 'name': 'traffic_light'}, {'frequency': 'c', 'synset': 'trail_bike.n.01', 'synonyms': ['dirt_bike'], 'id': 1113, 'def': 'a lightweight motorcycle equipped with rugged tires and suspension for off-road use', 'name': 'dirt_bike'}, {'frequency': 'f', 'synset': 'trailer_truck.n.01', 'synonyms': ['trailer_truck', 'tractor_trailer', 'trucking_rig', 'articulated_lorry', 'semi_truck'], 'id': 1114, 'def': 'a truck consisting of a tractor and trailer together', 'name': 'trailer_truck'}, {'frequency': 'f', 'synset': 'train.n.01', 'synonyms': ['train_(railroad_vehicle)', 'railroad_train'], 'id': 1115, 'def': 'public or private transport provided by a line of railway cars coupled together and drawn by a locomotive', 'name': 'train_(railroad_vehicle)'}, {'frequency': 'r', 'synset': 'trampoline.n.01', 'synonyms': ['trampoline'], 'id': 1116, 'def': 'gymnastic apparatus consisting of a strong canvas sheet attached with springs to a metal frame', 'name': 'trampoline'}, {'frequency': 'f', 'synset': 'tray.n.01', 'synonyms': ['tray'], 'id': 1117, 'def': 'an open receptacle for holding or displaying or serving articles or food', 'name': 'tray'}, {'frequency': 'r', 'synset': 'trench_coat.n.01', 'synonyms': ['trench_coat'], 'id': 1118, 'def': 'a military style raincoat; belted with deep pockets', 'name': 'trench_coat'}, {'frequency': 'r', 'synset': 'triangle.n.05', 'synonyms': ['triangle_(musical_instrument)'], 'id': 1119, 'def': 'a percussion instrument consisting of a metal bar bent in the shape of an open triangle', 'name': 'triangle_(musical_instrument)'}, {'frequency': 'c', 'synset': 'tricycle.n.01', 'synonyms': ['tricycle'], 'id': 
1120, 'def': 'a vehicle with three wheels that is moved by foot pedals', 'name': 'tricycle'}, {'frequency': 'f', 'synset': 'tripod.n.01', 'synonyms': ['tripod'], 'id': 1121, 'def': 'a three-legged rack used for support', 'name': 'tripod'}, {'frequency': 'f', 'synset': 'trouser.n.01', 'synonyms': ['trousers', 'pants_(clothing)'], 'id': 1122, 'def': 'a garment extending from the waist to the knee or ankle, covering each leg separately', 'name': 'trousers'}, {'frequency': 'f', 'synset': 'truck.n.01', 'synonyms': ['truck'], 'id': 1123, 'def': 'an automotive vehicle suitable for hauling', 'name': 'truck'}, {'frequency': 'r', 'synset': 'truffle.n.03', 'synonyms': ['truffle_(chocolate)', 'chocolate_truffle'], 'id': 1124, 'def': 'creamy chocolate candy', 'name': 'truffle_(chocolate)'}, {'frequency': 'c', 'synset': 'trunk.n.02', 'synonyms': ['trunk'], 'id': 1125, 'def': 'luggage consisting of a large strong case used when traveling or for storage', 'name': 'trunk'}, {'frequency': 'r', 'synset': 'tub.n.02', 'synonyms': ['vat'], 'id': 1126, 'def': 'a large vessel for holding or storing liquids', 'name': 'vat'}, {'frequency': 'c', 'synset': 'turban.n.01', 'synonyms': ['turban'], 'id': 1127, 'def': 'a traditional headdress consisting of a long scarf wrapped around the head', 'name': 'turban'}, {'frequency': 'c', 'synset': 'turkey.n.04', 'synonyms': ['turkey_(food)'], 'id': 1128, 'def': 'flesh of large domesticated fowl usually roasted', 'name': 'turkey_(food)'}, {'frequency': 'r', 'synset': 'turnip.n.01', 'synonyms': ['turnip'], 'id': 1129, 'def': 'widely cultivated plant having a large fleshy edible white or yellow root', 'name': 'turnip'}, {'frequency': 'c', 'synset': 'turtle.n.02', 'synonyms': ['turtle'], 'id': 1130, 'def': 'any of various aquatic and land reptiles having a bony shell and flipper-like limbs for swimming', 'name': 'turtle'}, {'frequency': 'c', 'synset': 'turtleneck.n.01', 'synonyms': ['turtleneck_(clothing)', 'polo-neck'], 'id': 1131, 'def': 'a sweater or jersey with a high close-fitting collar', 'name': 'turtleneck_(clothing)'}, {'frequency': 'c', 'synset': 'typewriter.n.01', 'synonyms': ['typewriter'], 'id': 1132, 'def': 'hand-operated character printer for printing written messages one character at a time', 'name': 'typewriter'}, {'frequency': 'f', 'synset': 'umbrella.n.01', 'synonyms': ['umbrella'], 'id': 1133, 'def': 'a lightweight handheld collapsible canopy', 'name': 'umbrella'}, {'frequency': 'f', 'synset': 'underwear.n.01', 'synonyms': ['underwear', 'underclothes', 'underclothing', 'underpants'], 'id': 1134, 'def': 'undergarment worn next to the skin and under the outer garments', 'name': 'underwear'}, {'frequency': 'r', 'synset': 'unicycle.n.01', 'synonyms': ['unicycle'], 'id': 1135, 'def': 'a vehicle with a single wheel that is driven by pedals', 'name': 'unicycle'}, {'frequency': 'f', 'synset': 'urinal.n.01', 'synonyms': ['urinal'], 'id': 1136, 'def': 'a plumbing fixture (usually attached to the wall) used by men to urinate', 'name': 'urinal'}, {'frequency': 'c', 'synset': 'urn.n.01', 'synonyms': ['urn'], 'id': 1137, 'def': 'a large vase that usually has a pedestal or feet', 'name': 'urn'}, {'frequency': 'c', 'synset': 'vacuum.n.04', 'synonyms': ['vacuum_cleaner'], 'id': 1138, 'def': 'an electrical home appliance that cleans by suction', 'name': 'vacuum_cleaner'}, {'frequency': 'f', 'synset': 'vase.n.01', 'synonyms': ['vase'], 'id': 1139, 'def': 'an open jar of glass or porcelain used as an ornament or to hold flowers', 'name': 'vase'}, {'frequency': 'c', 'synset': 
'vending_machine.n.01', 'synonyms': ['vending_machine'], 'id': 1140, 'def': 'a slot machine for selling goods', 'name': 'vending_machine'}, {'frequency': 'f', 'synset': 'vent.n.01', 'synonyms': ['vent', 'blowhole', 'air_vent'], 'id': 1141, 'def': 'a hole for the escape of gas or air', 'name': 'vent'}, {'frequency': 'f', 'synset': 'vest.n.01', 'synonyms': ['vest', 'waistcoat'], 'id': 1142, 'def': "a man's sleeveless garment worn underneath a coat", 'name': 'vest'}, {'frequency': 'c', 'synset': 'videotape.n.01', 'synonyms': ['videotape'], 'id': 1143, 'def': 'a video recording made on magnetic tape', 'name': 'videotape'}, {'frequency': 'r', 'synset': 'vinegar.n.01', 'synonyms': ['vinegar'], 'id': 1144, 'def': 'sour-tasting liquid produced usually by oxidation of the alcohol in wine or cider and used as a condiment or food preservative', 'name': 'vinegar'}, {'frequency': 'r', 'synset': 'violin.n.01', 'synonyms': ['violin', 'fiddle'], 'id': 1145, 'def': 'bowed stringed instrument that is the highest member of the violin family', 'name': 'violin'}, {'frequency': 'r', 'synset': 'vodka.n.01', 'synonyms': ['vodka'], 'id': 1146, 'def': 'unaged colorless liquor originating in Russia', 'name': 'vodka'}, {'frequency': 'c', 'synset': 'volleyball.n.02', 'synonyms': ['volleyball'], 'id': 1147, 'def': 'an inflated ball used in playing volleyball', 'name': 'volleyball'}, {'frequency': 'r', 'synset': 'vulture.n.01', 'synonyms': ['vulture'], 'id': 1148, 'def': 'any of various large birds of prey having naked heads and weak claws and feeding chiefly on carrion', 'name': 'vulture'}, {'frequency': 'c', 'synset': 'waffle.n.01', 'synonyms': ['waffle'], 'id': 1149, 'def': 'pancake batter baked in a waffle iron', 'name': 'waffle'}, {'frequency': 'r', 'synset': 'waffle_iron.n.01', 'synonyms': ['waffle_iron'], 'id': 1150, 'def': 'a kitchen appliance for baking waffles', 'name': 'waffle_iron'}, {'frequency': 'c', 'synset': 'wagon.n.01', 'synonyms': ['wagon'], 'id': 1151, 'def': 'any of various kinds of wheeled vehicles drawn by an animal or a tractor', 'name': 'wagon'}, {'frequency': 'c', 'synset': 'wagon_wheel.n.01', 'synonyms': ['wagon_wheel'], 'id': 1152, 'def': 'a wheel of a wagon', 'name': 'wagon_wheel'}, {'frequency': 'c', 'synset': 'walking_stick.n.01', 'synonyms': ['walking_stick'], 'id': 1153, 'def': 'a stick carried in the hand for support in walking', 'name': 'walking_stick'}, {'frequency': 'c', 'synset': 'wall_clock.n.01', 'synonyms': ['wall_clock'], 'id': 1154, 'def': 'a clock mounted on a wall', 'name': 'wall_clock'}, {'frequency': 'f', 'synset': 'wall_socket.n.01', 'synonyms': ['wall_socket', 'wall_plug', 'electric_outlet', 'electrical_outlet', 'outlet', 'electric_receptacle'], 'id': 1155, 'def': 'receptacle providing a place in a wiring system where current can be taken to run electrical devices', 'name': 'wall_socket'}, {'frequency': 'f', 'synset': 'wallet.n.01', 'synonyms': ['wallet', 'billfold'], 'id': 1156, 'def': 'a pocket-size case for holding papers and paper money', 'name': 'wallet'}, {'frequency': 'r', 'synset': 'walrus.n.01', 'synonyms': ['walrus'], 'id': 1157, 'def': 'either of two large northern marine mammals having ivory tusks and tough hide over thick blubber', 'name': 'walrus'}, {'frequency': 'r', 'synset': 'wardrobe.n.01', 'synonyms': ['wardrobe'], 'id': 1158, 'def': 'a tall piece of furniture that provides storage space for clothes; has a door and rails or hooks for hanging clothes', 'name': 'wardrobe'}, {'frequency': 'r', 'synset': 'washbasin.n.01', 'synonyms': ['washbasin', 
'basin_(for_washing)', 'washbowl', 'washstand', 'handbasin'], 'id': 1159, 'def': 'a bathroom sink that is permanently installed and connected to a water supply and drainpipe; where you can wash your hands and face', 'name': 'washbasin'}, {'frequency': 'c', 'synset': 'washer.n.03', 'synonyms': ['automatic_washer', 'washing_machine'], 'id': 1160, 'def': 'a home appliance for washing clothes and linens automatically', 'name': 'automatic_washer'}, {'frequency': 'f', 'synset': 'watch.n.01', 'synonyms': ['watch', 'wristwatch'], 'id': 1161, 'def': 'a small, portable timepiece', 'name': 'watch'}, {'frequency': 'f', 'synset': 'water_bottle.n.01', 'synonyms': ['water_bottle'], 'id': 1162, 'def': 'a bottle for holding water', 'name': 'water_bottle'}, {'frequency': 'c', 'synset': 'water_cooler.n.01', 'synonyms': ['water_cooler'], 'id': 1163, 'def': 'a device for cooling and dispensing drinking water', 'name': 'water_cooler'}, {'frequency': 'c', 'synset': 'water_faucet.n.01', 'synonyms': ['water_faucet', 'water_tap', 'tap_(water_faucet)'], 'id': 1164, 'def': 'a faucet for drawing water from a pipe or cask', 'name': 'water_faucet'}, {'frequency': 'r', 'synset': 'water_heater.n.01', 'synonyms': ['water_heater', 'hot-water_heater'], 'id': 1165, 'def': 'a heater and storage tank to supply heated water', 'name': 'water_heater'}, {'frequency': 'c', 'synset': 'water_jug.n.01', 'synonyms': ['water_jug'], 'id': 1166, 'def': 'a jug that holds water', 'name': 'water_jug'}, {'frequency': 'r', 'synset': 'water_pistol.n.01', 'synonyms': ['water_gun', 'squirt_gun'], 'id': 1167, 'def': 'plaything consisting of a toy pistol that squirts water', 'name': 'water_gun'}, {'frequency': 'c', 'synset': 'water_scooter.n.01', 'synonyms': ['water_scooter', 'sea_scooter', 'jet_ski'], 'id': 1168, 'def': 'a motorboat resembling a motor scooter (NOT A SURFBOARD OR WATER SKI)', 'name': 'water_scooter'}, {'frequency': 'c', 'synset': 'water_ski.n.01', 'synonyms': ['water_ski'], 'id': 1169, 'def': 'broad ski for skimming over water towed by a speedboat (DO NOT MARK WATER)', 'name': 'water_ski'}, {'frequency': 'c', 'synset': 'water_tower.n.01', 'synonyms': ['water_tower'], 'id': 1170, 'def': 'a large reservoir for water', 'name': 'water_tower'}, {'frequency': 'c', 'synset': 'watering_can.n.01', 'synonyms': ['watering_can'], 'id': 1171, 'def': 'a container with a handle and a spout with a perforated nozzle; used to sprinkle water over plants', 'name': 'watering_can'}, {'frequency': 'f', 'synset': 'watermelon.n.02', 'synonyms': ['watermelon'], 'id': 1172, 'def': 'large oblong or roundish melon with a hard green rind and sweet watery red or occasionally yellowish pulp', 'name': 'watermelon'}, {'frequency': 'f', 'synset': 'weathervane.n.01', 'synonyms': ['weathervane', 'vane_(weathervane)', 'wind_vane'], 'id': 1173, 'def': 'mechanical device attached to an elevated structure; rotates freely to show the direction of the wind', 'name': 'weathervane'}, {'frequency': 'c', 'synset': 'webcam.n.01', 'synonyms': ['webcam'], 'id': 1174, 'def': 'a digital camera designed to take digital photographs and transmit them over the internet', 'name': 'webcam'}, {'frequency': 'c', 'synset': 'wedding_cake.n.01', 'synonyms': ['wedding_cake', 'bridecake'], 'id': 1175, 'def': 'a rich cake with two or more tiers and covered with frosting and decorations; served at a wedding reception', 'name': 'wedding_cake'}, {'frequency': 'c', 'synset': 'wedding_ring.n.01', 'synonyms': ['wedding_ring', 'wedding_band'], 'id': 1176, 'def': 'a ring given to the bride and/or groom at 
the wedding', 'name': 'wedding_ring'}, {'frequency': 'f', 'synset': 'wet_suit.n.01', 'synonyms': ['wet_suit'], 'id': 1177, 'def': 'a close-fitting garment made of a permeable material; worn in cold water to retain body heat', 'name': 'wet_suit'}, {'frequency': 'f', 'synset': 'wheel.n.01', 'synonyms': ['wheel'], 'id': 1178, 'def': 'a circular frame with spokes (or a solid disc) that can rotate on a shaft or axle', 'name': 'wheel'}, {'frequency': 'c', 'synset': 'wheelchair.n.01', 'synonyms': ['wheelchair'], 'id': 1179, 'def': 'a movable chair mounted on large wheels', 'name': 'wheelchair'}, {'frequency': 'c', 'synset': 'whipped_cream.n.01', 'synonyms': ['whipped_cream'], 'id': 1180, 'def': 'cream that has been beaten until light and fluffy', 'name': 'whipped_cream'}, {'frequency': 'c', 'synset': 'whistle.n.03', 'synonyms': ['whistle'], 'id': 1181, 'def': 'a small wind instrument that produces a whistling sound by blowing into it', 'name': 'whistle'}, {'frequency': 'c', 'synset': 'wig.n.01', 'synonyms': ['wig'], 'id': 1182, 'def': 'hairpiece covering the head and made of real or synthetic hair', 'name': 'wig'}, {'frequency': 'c', 'synset': 'wind_chime.n.01', 'synonyms': ['wind_chime'], 'id': 1183, 'def': 'a decorative arrangement of pieces of metal or glass or pottery that hang together loosely so the wind can cause them to tinkle', 'name': 'wind_chime'}, {'frequency': 'c', 'synset': 'windmill.n.01', 'synonyms': ['windmill'], 'id': 1184, 'def': 'A mill or turbine that is powered by wind', 'name': 'windmill'}, {'frequency': 'c', 'synset': 'window_box.n.01', 'synonyms': ['window_box_(for_plants)'], 'id': 1185, 'def': 'a container for growing plants on a windowsill', 'name': 'window_box_(for_plants)'}, {'frequency': 'f', 'synset': 'windshield_wiper.n.01', 'synonyms': ['windshield_wiper', 'windscreen_wiper', 'wiper_(for_windshield/screen)'], 'id': 1186, 'def': 'a mechanical device that cleans the windshield', 'name': 'windshield_wiper'}, {'frequency': 'c', 'synset': 'windsock.n.01', 'synonyms': ['windsock', 'air_sock', 'air-sleeve', 'wind_sleeve', 'wind_cone'], 'id': 1187, 'def': 'a truncated cloth cone mounted on a mast/pole; shows wind direction', 'name': 'windsock'}, {'frequency': 'f', 'synset': 'wine_bottle.n.01', 'synonyms': ['wine_bottle'], 'id': 1188, 'def': 'a bottle for holding wine', 'name': 'wine_bottle'}, {'frequency': 'c', 'synset': 'wine_bucket.n.01', 'synonyms': ['wine_bucket', 'wine_cooler'], 'id': 1189, 'def': 'a bucket of ice used to chill a bottle of wine', 'name': 'wine_bucket'}, {'frequency': 'f', 'synset': 'wineglass.n.01', 'synonyms': ['wineglass'], 'id': 1190, 'def': 'a glass that has a stem and in which wine is served', 'name': 'wineglass'}, {'frequency': 'f', 'synset': 'winker.n.02', 'synonyms': ['blinder_(for_horses)'], 'id': 1191, 'def': 'blinds that prevent a horse from seeing something on either side', 'name': 'blinder_(for_horses)'}, {'frequency': 'c', 'synset': 'wok.n.01', 'synonyms': ['wok'], 'id': 1192, 'def': 'pan with a convex bottom; used for frying in Chinese cooking', 'name': 'wok'}, {'frequency': 'r', 'synset': 'wolf.n.01', 'synonyms': ['wolf'], 'id': 1193, 'def': 'a wild carnivorous mammal of the dog family, living and hunting in packs', 'name': 'wolf'}, {'frequency': 'c', 'synset': 'wooden_spoon.n.02', 'synonyms': ['wooden_spoon'], 'id': 1194, 'def': 'a spoon made of wood', 'name': 'wooden_spoon'}, {'frequency': 'c', 'synset': 'wreath.n.01', 'synonyms': ['wreath'], 'id': 1195, 'def': 'an arrangement of flowers, leaves, or stems fastened in a ring', 
'name': 'wreath'}, {'frequency': 'c', 'synset': 'wrench.n.03', 'synonyms': ['wrench', 'spanner'], 'id': 1196, 'def': 'a hand tool that is used to hold or twist a nut or bolt', 'name': 'wrench'}, {'frequency': 'f', 'synset': 'wristband.n.01', 'synonyms': ['wristband'], 'id': 1197, 'def': 'band consisting of a part of a sleeve that covers the wrist', 'name': 'wristband'}, {'frequency': 'f', 'synset': 'wristlet.n.01', 'synonyms': ['wristlet', 'wrist_band'], 'id': 1198, 'def': 'a band or bracelet worn around the wrist', 'name': 'wristlet'}, {'frequency': 'c', 'synset': 'yacht.n.01', 'synonyms': ['yacht'], 'id': 1199, 'def': 'an expensive vessel propelled by sail or power and used for cruising or racing', 'name': 'yacht'}, {'frequency': 'c', 'synset': 'yogurt.n.01', 'synonyms': ['yogurt', 'yoghurt', 'yoghourt'], 'id': 1200, 'def': 'a custard-like food made from curdled milk', 'name': 'yogurt'}, {'frequency': 'c', 'synset': 'yoke.n.07', 'synonyms': ['yoke_(animal_equipment)'], 'id': 1201, 'def': 'gear joining two animals at the neck; NOT egg yolk', 'name': 'yoke_(animal_equipment)'}, {'frequency': 'f', 'synset': 'zebra.n.01', 'synonyms': ['zebra'], 'id': 1202, 'def': 'any of several fleet black-and-white striped African equines', 'name': 'zebra'}, {'frequency': 'c', 'synset': 'zucchini.n.02', 'synonyms': ['zucchini', 'courgette'], 'id': 1203, 'def': 'small cucumber-shaped vegetable marrow; typically dark green', 'name': 'zucchini'}] # noqa
# fmt: on
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/lvis_v1_categories.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
from .coco import load_coco_json, load_sem_seg, register_coco_instances
from .coco_panoptic import register_coco_panoptic, register_coco_panoptic_separated
from .lvis import load_lvis_json, register_lvis_instances, get_lvis_instances_meta
from .pascal_voc import load_voc_instances, register_pascal_voc
from . import builtin as _builtin # ensure the builtin datasets are registered
__all__ = [k for k in globals().keys() if not k.startswith("_")]
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/__init__.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
import logging
import os
from fvcore.common.timer import Timer
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.structures import BoxMode
from detectron2.utils.file_io import PathManager
from .builtin_meta import _get_coco_instances_meta
from .lvis_v0_5_categories import LVIS_CATEGORIES as LVIS_V0_5_CATEGORIES
from .lvis_v1_categories import LVIS_CATEGORIES as LVIS_V1_CATEGORIES
"""
This file contains functions to parse LVIS-format annotations into dicts in the
"Detectron2 format".
"""
logger = logging.getLogger(__name__)
__all__ = ["load_lvis_json", "register_lvis_instances", "get_lvis_instances_meta"]
def register_lvis_instances(name, metadata, json_file, image_root):
"""
Register a dataset in LVIS's json annotation format for instance detection and segmentation.
Args:
name (str): a name that identifies the dataset, e.g. "lvis_v0.5_train".
metadata (dict): extra metadata associated with this dataset. It can be an empty dict.
json_file (str): path to the json instance annotation file.
image_root (str or path-like): directory which contains all the images.
"""
DatasetCatalog.register(name, lambda: load_lvis_json(json_file, image_root, name))
MetadataCatalog.get(name).set(
json_file=json_file, image_root=image_root, evaluator_type="lvis", **metadata
)
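# Illustrative sketch (added for exposition; not part of the upstream file):
# registering a hypothetical custom dataset stored in LVIS's json format.
# The name and paths below are placeholders.
#
#   register_lvis_instances(
#       "my_lvis_train",                # hypothetical dataset name
#       {},                             # no extra metadata
#       "datasets/my_lvis/train.json",  # placeholder annotation file
#       "datasets/my_lvis/images",      # placeholder image directory
#   )
#   dicts = DatasetCatalog.get("my_lvis_train")  # triggers load_lvis_json lazily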
def load_lvis_json(json_file, image_root, dataset_name=None):
"""
Load a json file in LVIS's annotation format.
Args:
json_file (str): full path to the LVIS json annotation file.
image_root (str): the directory where the images in this json file exist.
dataset_name (str): the name of the dataset (e.g., "lvis_v0.5_train").
If provided, this function will put "thing_classes" into the metadata
associated with this dataset.
Returns:
list[dict]: a list of dicts in Detectron2 standard format. (See
`Using Custom Datasets </tutorials/datasets.html>`_ )
Notes:
1. This function does not read the image files.
The results do not have the "image" field.
"""
from lvis import LVIS
json_file = PathManager.get_local_path(json_file)
timer = Timer()
lvis_api = LVIS(json_file)
if timer.seconds() > 1:
logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds()))
if dataset_name is not None:
meta = get_lvis_instances_meta(dataset_name)
MetadataCatalog.get(dataset_name).set(**meta)
# sort indices for reproducible results
img_ids = sorted(lvis_api.imgs.keys())
# imgs is a list of dicts, each looks something like:
# {'license': 4,
# 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',
# 'file_name': 'COCO_val2014_000000001268.jpg',
# 'height': 427,
# 'width': 640,
# 'date_captured': '2013-11-17 05:57:24',
# 'id': 1268}
imgs = lvis_api.load_imgs(img_ids)
# anns is a list[list[dict]], where each dict is an annotation
# record for an object. The inner list enumerates the objects in an image
# and the outer list enumerates over images. Example of anns[0]:
# [{'segmentation': [[192.81,
# 247.09,
# ...
# 219.03,
# 249.06]],
# 'area': 1035.749,
# 'image_id': 1268,
# 'bbox': [192.81, 224.8, 74.73, 33.43],
# 'category_id': 16,
# 'id': 42986},
# ...]
anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids]
# Sanity check that each annotation has a unique id
ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image]
assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique".format(
json_file
)
imgs_anns = list(zip(imgs, anns))
logger.info("Loaded {} images in the LVIS format from {}".format(len(imgs_anns), json_file))
def get_file_name(img_root, img_dict):
# Determine the path including the split folder ("train2017", "val2017", "test2017") from
# the coco_url field. Example:
# 'coco_url': 'http://images.cocodataset.org/train2017/000000155379.jpg'
split_folder, file_name = img_dict["coco_url"].split("/")[-2:]
return os.path.join(img_root + split_folder, file_name)
dataset_dicts = []
for (img_dict, anno_dict_list) in imgs_anns:
record = {}
record["file_name"] = get_file_name(image_root, img_dict)
record["height"] = img_dict["height"]
record["width"] = img_dict["width"]
record["not_exhaustive_category_ids"] = img_dict.get("not_exhaustive_category_ids", [])
record["neg_category_ids"] = img_dict.get("neg_category_ids", [])
image_id = record["image_id"] = img_dict["id"]
objs = []
for anno in anno_dict_list:
# Check that the image_id in this annotation is the same as
# the image_id we're looking at.
# This fails only when the data parsing logic or the annotation file is buggy.
assert anno["image_id"] == image_id
obj = {"bbox": anno["bbox"], "bbox_mode": BoxMode.XYWH_ABS}
# The LVIS data loader can also be used to load COCO dataset categories. In that
# case the `meta` variable contains a field with the COCO-specific category mapping.
if dataset_name is not None and "thing_dataset_id_to_contiguous_id" in meta:
obj["category_id"] = meta["thing_dataset_id_to_contiguous_id"][anno["category_id"]]
else:
obj["category_id"] = anno["category_id"] - 1 # Convert 1-indexed to 0-indexed
segm = anno["segmentation"] # list[list[float]]
# filter out invalid polygons (< 3 points)
valid_segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]
assert len(segm) == len(
valid_segm
), "Annotation contains an invalid polygon with < 3 points"
assert len(segm) > 0
obj["segmentation"] = segm
objs.append(obj)
record["annotations"] = objs
dataset_dicts.append(record)
return dataset_dicts
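# Illustrative sketch of a single returned record (values are placeholders taken
# from the example annotation above; the keys are exactly those populated here):
#
#   {
#       "file_name": "datasets/coco/train2017/000000155379.jpg",
#       "height": 427, "width": 640, "image_id": 1268,
#       "not_exhaustive_category_ids": [], "neg_category_ids": [],
#       "annotations": [
#           {"bbox": [192.81, 224.8, 74.73, 33.43], "bbox_mode": BoxMode.XYWH_ABS,
#            "category_id": 15, "segmentation": [[...]]},
#       ],
#   }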
def get_lvis_instances_meta(dataset_name):
"""
Load LVIS metadata.
Args:
dataset_name (str): LVIS dataset name without the split name (e.g., "lvis_v0.5").
Returns:
dict: LVIS metadata with keys: thing_classes
"""
if "cocofied" in dataset_name:
return _get_coco_instances_meta()
if "v0.5" in dataset_name:
return _get_lvis_instances_meta_v0_5()
elif "v1" in dataset_name:
return _get_lvis_instances_meta_v1()
raise ValueError("No built-in metadata for dataset {}".format(dataset_name))
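# Illustrative sketch: the returned metadata is just the ordered class names,
# e.g. the first synonyms of the first three v0.5 categories, sorted by id:
#
#   get_lvis_instances_meta("lvis_v0.5")["thing_classes"][:3]
#   # -> ['acorn', 'aerosol_can', 'air_conditioner']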
def _get_lvis_instances_meta_v0_5():
assert len(LVIS_V0_5_CATEGORIES) == 1230
cat_ids = [k["id"] for k in LVIS_V0_5_CATEGORIES]
assert min(cat_ids) == 1 and max(cat_ids) == len(
cat_ids
), "Category ids are not in [1, #categories], as expected"
# Ensure that the category list is sorted by id
lvis_categories = sorted(LVIS_V0_5_CATEGORIES, key=lambda x: x["id"])
thing_classes = [k["synonyms"][0] for k in lvis_categories]
meta = {"thing_classes": thing_classes}
return meta
def _get_lvis_instances_meta_v1():
assert len(LVIS_V1_CATEGORIES) == 1203
cat_ids = [k["id"] for k in LVIS_V1_CATEGORIES]
assert min(cat_ids) == 1 and max(cat_ids) == len(
cat_ids
), "Category ids are not in [1, #categories], as expected"
# Ensure that the category list is sorted by id
lvis_categories = sorted(LVIS_V1_CATEGORIES, key=lambda x: x["id"])
thing_classes = [k["synonyms"][0] for k in lvis_categories]
meta = {"thing_classes": thing_classes}
return meta
if __name__ == "__main__":
"""
Test the LVIS json dataset loader.
Usage:
python -m detectron2.data.datasets.lvis \
path/to/json path/to/image_root dataset_name vis_limit
"""
import sys
import numpy as np
from detectron2.utils.logger import setup_logger
from PIL import Image
import detectron2.data.datasets # noqa # add pre-defined metadata
from detectron2.utils.visualizer import Visualizer
logger = setup_logger(name=__name__)
meta = MetadataCatalog.get(sys.argv[3])
dicts = load_lvis_json(sys.argv[1], sys.argv[2], sys.argv[3])
logger.info("Done loading {} samples.".format(len(dicts)))
dirname = "lvis-data-vis"
os.makedirs(dirname, exist_ok=True)
for d in dicts[: int(sys.argv[4])]:
img = np.array(Image.open(d["file_name"]))
visualizer = Visualizer(img, metadata=meta)
vis = visualizer.draw_dataset_dict(d)
fpath = os.path.join(dirname, os.path.basename(d["file_name"]))
vis.save(fpath)
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/lvis.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
import copy
import json
import os
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.utils.file_io import PathManager
from .coco import load_coco_json, load_sem_seg
__all__ = ["register_coco_panoptic", "register_coco_panoptic_separated"]
def load_coco_panoptic_json(json_file, image_dir, gt_dir, meta):
"""
Args:
image_dir (str): path to the raw dataset. e.g., "~/coco/train2017".
gt_dir (str): path to the raw annotations. e.g., "~/coco/panoptic_train2017".
json_file (str): path to the json file. e.g., "~/coco/annotations/panoptic_train2017.json".
Returns:
list[dict]: a list of dicts in Detectron2 standard format. (See
`Using Custom Datasets </tutorials/datasets.html>`_ )
"""
def _convert_category_id(segment_info, meta):
if segment_info["category_id"] in meta["thing_dataset_id_to_contiguous_id"]:
segment_info["category_id"] = meta["thing_dataset_id_to_contiguous_id"][
segment_info["category_id"]
]
segment_info["isthing"] = True
else:
segment_info["category_id"] = meta["stuff_dataset_id_to_contiguous_id"][
segment_info["category_id"]
]
segment_info["isthing"] = False
return segment_info
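# Illustrative example: with meta = {"thing_dataset_id_to_contiguous_id": {7: 0}, ...},
# a segment_info carrying "category_id" 7 comes back as
# {"category_id": 0, "isthing": True, ...}.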
with PathManager.open(json_file) as f:
json_info = json.load(f)
ret = []
for ann in json_info["annotations"]:
image_id = int(ann["image_id"])
# TODO: currently we assume image and label have the same filename but
# different extension, and images have extension ".jpg" for COCO. Need
# to make image extension a user-provided argument if we extend this
# function to support other COCO-like datasets.
image_file = os.path.join(image_dir, os.path.splitext(ann["file_name"])[0] + ".jpg")
label_file = os.path.join(gt_dir, ann["file_name"])
segments_info = [_convert_category_id(x, meta) for x in ann["segments_info"]]
ret.append(
{
"file_name": image_file,
"image_id": image_id,
"pan_seg_file_name": label_file,
"segments_info": segments_info,
}
)
assert len(ret), f"No images found in {image_dir}!"
assert PathManager.isfile(ret[0]["file_name"]), ret[0]["file_name"]
assert PathManager.isfile(ret[0]["pan_seg_file_name"]), ret[0]["pan_seg_file_name"]
return ret
def register_coco_panoptic(
name, metadata, image_root, panoptic_root, panoptic_json, instances_json=None
):
"""
Register a "standard" version of COCO panoptic segmentation dataset named `name`.
The dictionaries in this registered dataset follow detectron2's standard format.
Hence it's called "standard".
Args:
name (str): the name that identifies a dataset,
e.g. "coco_2017_train_panoptic"
metadata (dict): extra metadata associated with this dataset.
image_root (str): directory which contains all the images
panoptic_root (str): directory which contains panoptic annotation images in COCO format
panoptic_json (str): path to the json panoptic annotation file in COCO format
sem_seg_root (none): not used by this function; documented only for
consistency with `register_coco_panoptic_separated`.
instances_json (str): path to the json instance annotation file
"""
panoptic_name = name
DatasetCatalog.register(
panoptic_name,
lambda: load_coco_panoptic_json(panoptic_json, image_root, panoptic_root, metadata),
)
MetadataCatalog.get(panoptic_name).set(
panoptic_root=panoptic_root,
image_root=image_root,
panoptic_json=panoptic_json,
json_file=instances_json,
evaluator_type="coco_panoptic_seg",
ignore_label=255,
label_divisor=1000,
**metadata,
)
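# Illustrative sketch (placeholders; not part of the upstream file): registering
# a hypothetical "standard" panoptic dataset. `my_metadata` stands in for a dict
# carrying the thing/stuff id mappings consumed by the loader above.
#
#   register_coco_panoptic(
#       "my_coco_panoptic_train",
#       my_metadata,
#       "datasets/coco/train2017",
#       "datasets/coco/panoptic_train2017",
#       "datasets/coco/annotations/panoptic_train2017.json",
#   )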
def register_coco_panoptic_separated(
name, metadata, image_root, panoptic_root, panoptic_json, sem_seg_root, instances_json
):
"""
Register a "separated" version of COCO panoptic segmentation dataset named `name`.
The annotations in this registered dataset will contain both instance annotations and
semantic annotations, each with its own contiguous ids. Hence it's called "separated".
It follows the setting used by the PanopticFPN paper:
1. The instance annotations directly come from polygons in the COCO
instances annotation task, rather than from the masks in the COCO panoptic annotations.
The two formats have small differences:
Polygons in the instance annotations may have overlaps.
The mask annotations are produced by labeling the overlapped polygons
with depth ordering.
2. The semantic annotations are converted from panoptic annotations, where
all "things" are assigned a semantic id of 0.
All semantic categories will therefore have ids in contiguous
range [1, #stuff_categories].
This function will also register a pure semantic segmentation dataset
named ``name + '_stuffonly'``.
Args:
name (str): the name that identifies a dataset,
e.g. "coco_2017_train_panoptic"
metadata (dict): extra metadata associated with this dataset.
image_root (str): directory which contains all the images
panoptic_root (str): directory which contains panoptic annotation images
panoptic_json (str): path to the json panoptic annotation file
sem_seg_root (str): directory which contains all the ground truth segmentation annotations.
instances_json (str): path to the json instance annotation file
"""
panoptic_name = name + "_separated"
DatasetCatalog.register(
panoptic_name,
lambda: merge_to_panoptic(
load_coco_json(instances_json, image_root, panoptic_name),
load_sem_seg(sem_seg_root, image_root),
),
)
MetadataCatalog.get(panoptic_name).set(
panoptic_root=panoptic_root,
image_root=image_root,
panoptic_json=panoptic_json,
sem_seg_root=sem_seg_root,
json_file=instances_json, # TODO rename
evaluator_type="coco_panoptic_seg",
ignore_label=255,
**metadata,
)
semantic_name = name + "_stuffonly"
DatasetCatalog.register(semantic_name, lambda: load_sem_seg(sem_seg_root, image_root))
MetadataCatalog.get(semantic_name).set(
sem_seg_root=sem_seg_root,
image_root=image_root,
evaluator_type="sem_seg",
ignore_label=255,
**metadata,
)
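# Note (for exposition): calling this with name="coco_2017_train_panoptic"
# registers two entries in DatasetCatalog:
#   "coco_2017_train_panoptic_separated"  (instances merged with semantic seg)
#   "coco_2017_train_panoptic_stuffonly"  (semantic segmentation only)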
def merge_to_panoptic(detection_dicts, sem_seg_dicts):
"""
Create dataset dicts for panoptic segmentation by merging two lists of dicts,
using the "file_name" field to match their entries.
Args:
detection_dicts (list[dict]): lists of dicts for object detection or instance segmentation.
sem_seg_dicts (list[dict]): lists of dicts for semantic segmentation.
Returns:
list[dict] (one per input image): Each dict contains all (key, value) pairs from dicts in
both detection_dicts and sem_seg_dicts that correspond to the same image.
The function assumes that the same key in different dicts has the same value.
"""
results = []
sem_seg_file_to_entry = {x["file_name"]: x for x in sem_seg_dicts}
assert len(sem_seg_file_to_entry) > 0
for det_dict in detection_dicts:
dic = copy.copy(det_dict)
dic.update(sem_seg_file_to_entry[dic["file_name"]])
results.append(dic)
return results
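# Illustrative sketch of the merge on toy inputs (not real dataset dicts):
#
#   det = [{"file_name": "a.jpg", "annotations": ["..."]}]
#   sem = [{"file_name": "a.jpg", "sem_seg_file_name": "a.png"}]
#   merge_to_panoptic(det, sem)
#   # -> [{"file_name": "a.jpg", "annotations": ["..."],
#   #      "sem_seg_file_name": "a.png"}]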
if __name__ == "__main__":
"""
Test the COCO panoptic dataset loader.
Usage:
python -m detectron2.data.datasets.coco_panoptic \
path/to/image_root path/to/panoptic_root path/to/panoptic_json dataset_name 10
"dataset_name" can be "coco_2017_train_panoptic", or other
pre-registered ones
"""
from detectron2.utils.logger import setup_logger
from detectron2.utils.visualizer import Visualizer
import detectron2.data.datasets # noqa # add pre-defined metadata
import sys
from PIL import Image
import numpy as np
logger = setup_logger(name=__name__)
assert sys.argv[4] in DatasetCatalog.list()
meta = MetadataCatalog.get(sys.argv[4])
dicts = load_coco_panoptic_json(sys.argv[3], sys.argv[1], sys.argv[2], meta.as_dict())
logger.info("Done loading {} samples.".format(len(dicts)))
dirname = "coco-data-vis"
os.makedirs(dirname, exist_ok=True)
num_imgs_to_vis = int(sys.argv[5])
for i, d in enumerate(dicts):
img = np.array(Image.open(d["file_name"]))
visualizer = Visualizer(img, metadata=meta)
vis = visualizer.draw_dataset_dict(d)
fpath = os.path.join(dirname, os.path.basename(d["file_name"]))
vis.save(fpath)
if i + 1 >= num_imgs_to_vis:
break
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/coco_panoptic.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
"""
This file registers pre-defined datasets at hard-coded paths, and their metadata.
We hard-code metadata for common datasets. This will enable:
1. Consistency check when loading the datasets
2. Using models on these standard datasets directly and running demos,
without having to download the dataset annotations
We hard-code some paths to the dataset that's assumed to
exist in "./datasets/".
Users SHOULD NOT use this file to create new datasets / metadata for new datasets.
To add a new dataset, refer to the tutorial "docs/DATASETS.md".
"""
import os
from detectron2.data import DatasetCatalog, MetadataCatalog
from .builtin_meta import ADE20K_SEM_SEG_CATEGORIES, _get_builtin_metadata
from .cityscapes import load_cityscapes_instances, load_cityscapes_semantic
from .cityscapes_panoptic import register_all_cityscapes_panoptic
from .coco import load_sem_seg, register_coco_instances
from .coco_panoptic import register_coco_panoptic, register_coco_panoptic_separated
from .lvis import get_lvis_instances_meta, register_lvis_instances
from .pascal_voc import register_pascal_voc
# ==== Predefined datasets and splits for COCO ==========
_PREDEFINED_SPLITS_COCO = {}
_PREDEFINED_SPLITS_COCO["coco"] = {
"coco_2014_train": ("coco/train2014", "coco/annotations/instances_train2014.json"),
"coco_2014_val": ("coco/val2014", "coco/annotations/instances_val2014.json"),
"coco_2014_minival": ("coco/val2014", "coco/annotations/instances_minival2014.json"),
"coco_2014_minival_100": ("coco/val2014", "coco/annotations/instances_minival2014_100.json"),
"coco_2014_valminusminival": (
"coco/val2014",
"coco/annotations/instances_valminusminival2014.json",
),
"coco_2017_train": ("coco/train2017", "coco/annotations/instances_train2017.json"),
"coco_2017_val": ("coco/val2017", "coco/annotations/instances_val2017.json"),
"coco_2017_test": ("coco/test2017", "coco/annotations/image_info_test2017.json"),
"coco_2017_test-dev": ("coco/test2017", "coco/annotations/image_info_test-dev2017.json"),
"coco_2017_val_100": ("coco/val2017", "coco/annotations/instances_val2017_100.json"),
}
_PREDEFINED_SPLITS_COCO["coco_person"] = {
"keypoints_coco_2014_train": (
"coco/train2014",
"coco/annotations/person_keypoints_train2014.json",
),
"keypoints_coco_2014_val": ("coco/val2014", "coco/annotations/person_keypoints_val2014.json"),
"keypoints_coco_2014_minival": (
"coco/val2014",
"coco/annotations/person_keypoints_minival2014.json",
),
"keypoints_coco_2014_valminusminival": (
"coco/val2014",
"coco/annotations/person_keypoints_valminusminival2014.json",
),
"keypoints_coco_2014_minival_100": (
"coco/val2014",
"coco/annotations/person_keypoints_minival2014_100.json",
),
"keypoints_coco_2017_train": (
"coco/train2017",
"coco/annotations/person_keypoints_train2017.json",
),
"keypoints_coco_2017_val": ("coco/val2017", "coco/annotations/person_keypoints_val2017.json"),
"keypoints_coco_2017_val_100": (
"coco/val2017",
"coco/annotations/person_keypoints_val2017_100.json",
),
}
_PREDEFINED_SPLITS_COCO_PANOPTIC = {
"coco_2017_train_panoptic": (
# This is the original panoptic annotation directory
"coco/panoptic_train2017",
"coco/annotations/panoptic_train2017.json",
# This directory contains semantic annotations that are
# converted from panoptic annotations.
# It is used by PanopticFPN.
# You can use the script at detectron2/datasets/prepare_panoptic_fpn.py
# to create these directories.
"coco/panoptic_stuff_train2017",
),
"coco_2017_val_panoptic": (
"coco/panoptic_val2017",
"coco/annotations/panoptic_val2017.json",
"coco/panoptic_stuff_val2017",
),
"coco_2017_val_100_panoptic": (
"coco/panoptic_val2017_100",
"coco/annotations/panoptic_val2017_100.json",
"coco/panoptic_stuff_val2017_100",
),
}
def register_all_coco(root):
for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_COCO.items():
for key, (image_root, json_file) in splits_per_dataset.items():
# Assume pre-defined datasets live in `./datasets`.
register_coco_instances(
key,
_get_builtin_metadata(dataset_name),
os.path.join(root, json_file) if "://" not in json_file else json_file,
os.path.join(root, image_root),
)
for (
prefix,
(panoptic_root, panoptic_json, semantic_root),
) in _PREDEFINED_SPLITS_COCO_PANOPTIC.items():
prefix_instances = prefix[: -len("_panoptic")]
instances_meta = MetadataCatalog.get(prefix_instances)
image_root, instances_json = instances_meta.image_root, instances_meta.json_file
# The "separated" version of COCO panoptic segmentation dataset,
# e.g. used by Panoptic FPN
register_coco_panoptic_separated(
prefix,
_get_builtin_metadata("coco_panoptic_separated"),
image_root,
os.path.join(root, panoptic_root),
os.path.join(root, panoptic_json),
os.path.join(root, semantic_root),
instances_json,
)
# The "standard" version of COCO panoptic segmentation dataset,
# e.g. used by Panoptic-DeepLab
register_coco_panoptic(
prefix,
_get_builtin_metadata("coco_panoptic_standard"),
image_root,
os.path.join(root, panoptic_root),
os.path.join(root, panoptic_json),
instances_json,
)
# ==== Predefined datasets and splits for LVIS ==========
_PREDEFINED_SPLITS_LVIS = {
"lvis_v1": {
"lvis_v1_train": ("coco/", "lvis/lvis_v1_train.json"),
"lvis_v1_val": ("coco/", "lvis/lvis_v1_val.json"),
"lvis_v1_test_dev": ("coco/", "lvis/lvis_v1_image_info_test_dev.json"),
"lvis_v1_test_challenge": ("coco/", "lvis/lvis_v1_image_info_test_challenge.json"),
},
"lvis_v0.5": {
"lvis_v0.5_train": ("coco/", "lvis/lvis_v0.5_train.json"),
"lvis_v0.5_val": ("coco/", "lvis/lvis_v0.5_val.json"),
"lvis_v0.5_val_rand_100": ("coco/", "lvis/lvis_v0.5_val_rand_100.json"),
"lvis_v0.5_test": ("coco/", "lvis/lvis_v0.5_image_info_test.json"),
},
"lvis_v0.5_cocofied": {
"lvis_v0.5_train_cocofied": ("coco/", "lvis/lvis_v0.5_train_cocofied.json"),
"lvis_v0.5_val_cocofied": ("coco/", "lvis/lvis_v0.5_val_cocofied.json"),
},
}
def register_all_lvis(root):
for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_LVIS.items():
for key, (image_root, json_file) in splits_per_dataset.items():
register_lvis_instances(
key,
get_lvis_instances_meta(dataset_name),
os.path.join(root, json_file) if "://" not in json_file else json_file,
os.path.join(root, image_root),
)
# ==== Predefined splits for raw cityscapes images ===========
_RAW_CITYSCAPES_SPLITS = {
"cityscapes_fine_{task}_train": ("cityscapes/leftImg8bit/train/", "cityscapes/gtFine/train/"),
"cityscapes_fine_{task}_val": ("cityscapes/leftImg8bit/val/", "cityscapes/gtFine/val/"),
"cityscapes_fine_{task}_test": ("cityscapes/leftImg8bit/test/", "cityscapes/gtFine/test/"),
}
def register_all_cityscapes(root):
for key, (image_dir, gt_dir) in _RAW_CITYSCAPES_SPLITS.items():
meta = _get_builtin_metadata("cityscapes")
image_dir = os.path.join(root, image_dir)
gt_dir = os.path.join(root, gt_dir)
inst_key = key.format(task="instance_seg")
DatasetCatalog.register(
inst_key,
lambda x=image_dir, y=gt_dir: load_cityscapes_instances(
x, y, from_json=True, to_polygons=True
),
)
MetadataCatalog.get(inst_key).set(
image_dir=image_dir, gt_dir=gt_dir, evaluator_type="cityscapes_instance", **meta
)
sem_key = key.format(task="sem_seg")
DatasetCatalog.register(
sem_key, lambda x=image_dir, y=gt_dir: load_cityscapes_semantic(x, y)
)
MetadataCatalog.get(sem_key).set(
image_dir=image_dir,
gt_dir=gt_dir,
evaluator_type="cityscapes_sem_seg",
ignore_label=255,
**meta,
)
# ==== Predefined splits for PASCAL VOC ===========
def register_all_pascal_voc(root):
SPLITS = [
("voc_2007_trainval", "VOC2007", "trainval"),
("voc_2007_train", "VOC2007", "train"),
("voc_2007_val", "VOC2007", "val"),
("voc_2007_test", "VOC2007", "test"),
("voc_2012_trainval", "VOC2012", "trainval"),
("voc_2012_train", "VOC2012", "train"),
("voc_2012_val", "VOC2012", "val"),
]
for name, dirname, split in SPLITS:
year = 2007 if "2007" in name else 2012
register_pascal_voc(name, os.path.join(root, dirname), split, year)
MetadataCatalog.get(name).evaluator_type = "pascal_voc"
def register_all_ade20k(root):
root = os.path.join(root, "ADEChallengeData2016")
for name, dirname in [("train", "training"), ("val", "validation")]:
image_dir = os.path.join(root, "images", dirname)
gt_dir = os.path.join(root, "annotations_detectron2", dirname)
name = f"ade20k_sem_seg_{name}"
DatasetCatalog.register(
name, lambda x=image_dir, y=gt_dir: load_sem_seg(y, x, gt_ext="png", image_ext="jpg")
)
MetadataCatalog.get(name).set(
stuff_classes=ADE20K_SEM_SEG_CATEGORIES[:],
image_root=image_dir,
sem_seg_root=gt_dir,
evaluator_type="sem_seg",
ignore_label=255,
)
# True for open source;
# Internally at fb, we register them elsewhere
if __name__.endswith(".builtin"):
# Assume pre-defined datasets live in `./datasets`.
_root = os.getenv("DETECTRON2_DATASETS", "datasets")
register_all_coco(_root)
register_all_lvis(_root)
register_all_cityscapes(_root)
register_all_cityscapes_panoptic(_root)
register_all_pascal_voc(_root)
register_all_ade20k(_root)
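# Illustrative note (not part of the upstream file): the dataset root can be
# redirected by setting the environment variable before this module is imported:
#
#   os.environ["DETECTRON2_DATASETS"] = "/path/to/datasets"  # placeholder path
#   import detectron2.data.datasets  # noqa  # runs the registrations above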
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/builtin.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
import numpy as np
import os
import xml.etree.ElementTree as ET
from typing import List, Tuple, Union
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.structures import BoxMode
from detectron2.utils.file_io import PathManager
__all__ = ["load_voc_instances", "register_pascal_voc"]
# fmt: off
CLASS_NAMES = (
"aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat",
"chair", "cow", "diningtable", "dog", "horse", "motorbike", "person",
"pottedplant", "sheep", "sofa", "train", "tvmonitor"
)
# fmt: on
def load_voc_instances(dirname: str, split: str, class_names: Union[List[str], Tuple[str, ...]]):
"""
Load Pascal VOC detection annotations to Detectron2 format.
Args:
dirname: directory containing "Annotations", "ImageSets", "JPEGImages"
split (str): one of "train", "test", "val", "trainval"
class_names: list or tuple of class names
"""
with PathManager.open(os.path.join(dirname, "ImageSets", "Main", split + ".txt")) as f:
fileids = np.loadtxt(f, dtype=str)  # np.str was removed in NumPy 1.24; the builtin str is equivalent
# Needs to read many small annotation files, so it makes sense to fetch a local copy first.
annotation_dirname = PathManager.get_local_path(os.path.join(dirname, "Annotations/"))
dicts = []
for fileid in fileids:
anno_file = os.path.join(annotation_dirname, fileid + ".xml")
jpeg_file = os.path.join(dirname, "JPEGImages", fileid + ".jpg")
with PathManager.open(anno_file) as f:
tree = ET.parse(f)
r = {
"file_name": jpeg_file,
"image_id": fileid,
"height": int(tree.findall("./size/height")[0].text),
"width": int(tree.findall("./size/width")[0].text),
}
instances = []
for obj in tree.findall("object"):
cls = obj.find("name").text
# We include "difficult" samples in training.
# Based on limited experiments, they don't hurt accuracy.
# difficult = int(obj.find("difficult").text)
# if difficult == 1:
# continue
bbox = obj.find("bndbox")
bbox = [float(bbox.find(x).text) for x in ["xmin", "ymin", "xmax", "ymax"]]
# Original annotations are integers in the range [1, W or H]
# Assuming they mean 1-based pixel indices (inclusive),
# a box with annotation (xmin=1, xmax=W) covers the whole image.
# In coordinate space this is represented by (xmin=0, xmax=W)
bbox[0] -= 1.0
bbox[1] -= 1.0
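# Worked example (illustrative): a VOC box annotated as
# (xmin=1, ymin=1, xmax=W, ymax=H) becomes (0.0, 0.0, W, H) after the
# shift, i.e. it spans the whole image in 0-based coordinates.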
instances.append(
{"category_id": class_names.index(cls), "bbox": bbox, "bbox_mode": BoxMode.XYXY_ABS}
)
r["annotations"] = instances
dicts.append(r)
return dicts
def register_pascal_voc(name, dirname, split, year, class_names=CLASS_NAMES):
DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split, class_names))
MetadataCatalog.get(name).set(
thing_classes=list(class_names), dirname=dirname, year=year, split=split
)
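# Illustrative sketch (placeholders): registering a VOC-style dataset that
# lives outside the builtin "datasets/" root.
#
#   register_pascal_voc(
#       "my_voc_2007_trainval",      # hypothetical dataset name
#       "/data/VOCdevkit/VOC2007",   # placeholder dirname
#       "trainval",
#       2007,
#   )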
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/pascal_voc.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
# Autogen with
# with open("lvis_v0.5_val.json", "r") as f:
# a = json.load(f)
# c = a["categories"]
# for x in c:
# del x["image_count"]
# del x["instance_count"]
# LVIS_CATEGORIES = repr(c) + " # noqa"
# fmt: off
LVIS_CATEGORIES = [{'frequency': 'r', 'id': 1, 'synset': 'acorn.n.01', 'synonyms': ['acorn'], 'def': 'nut from an oak tree', 'name': 'acorn'}, {'frequency': 'c', 'id': 2, 'synset': 'aerosol.n.02', 'synonyms': ['aerosol_can', 'spray_can'], 'def': 'a dispenser that holds a substance under pressure', 'name': 'aerosol_can'}, {'frequency': 'f', 'id': 3, 'synset': 'air_conditioner.n.01', 'synonyms': ['air_conditioner'], 'def': 'a machine that keeps air cool and dry', 'name': 'air_conditioner'}, {'frequency': 'f', 'id': 4, 'synset': 'airplane.n.01', 'synonyms': ['airplane', 'aeroplane'], 'def': 'an aircraft that has a fixed wing and is powered by propellers or jets', 'name': 'airplane'}, {'frequency': 'c', 'id': 5, 'synset': 'alarm_clock.n.01', 'synonyms': ['alarm_clock'], 'def': 'a clock that wakes a sleeper at some preset time', 'name': 'alarm_clock'}, {'frequency': 'c', 'id': 6, 'synset': 'alcohol.n.01', 'synonyms': ['alcohol', 'alcoholic_beverage'], 'def': 'a liquor or brew containing alcohol as the active agent', 'name': 'alcohol'}, {'frequency': 'r', 'id': 7, 'synset': 'alligator.n.02', 'synonyms': ['alligator', 'gator'], 'def': 'amphibious reptiles related to crocodiles but with shorter broader snouts', 'name': 'alligator'}, {'frequency': 'c', 'id': 8, 'synset': 'almond.n.02', 'synonyms': ['almond'], 'def': 'oval-shaped edible seed of the almond tree', 'name': 'almond'}, {'frequency': 'c', 'id': 9, 'synset': 'ambulance.n.01', 'synonyms': ['ambulance'], 'def': 'a vehicle that takes people to and from hospitals', 'name': 'ambulance'}, {'frequency': 'r', 'id': 10, 'synset': 'amplifier.n.01', 'synonyms': ['amplifier'], 'def': 'electronic equipment that increases strength of signals', 'name': 'amplifier'}, {'frequency': 'c', 'id': 11, 'synset': 'anklet.n.03', 'synonyms': ['anklet', 'ankle_bracelet'], 'def': 'an ornament worn around the ankle', 'name': 'anklet'}, {'frequency': 'f', 'id': 12, 'synset': 'antenna.n.01', 'synonyms': ['antenna', 'aerial', 'transmitting_aerial'], 'def': 'an electrical device that sends or receives radio or television signals', 'name': 'antenna'}, {'frequency': 'f', 'id': 13, 'synset': 'apple.n.01', 'synonyms': ['apple'], 'def': 'fruit with red or yellow or green skin and sweet to tart crisp whitish flesh', 'name': 'apple'}, {'frequency': 'r', 'id': 14, 'synset': 'apple_juice.n.01', 'synonyms': ['apple_juice'], 'def': 'the juice of apples', 'name': 'apple_juice'}, {'frequency': 'r', 'id': 15, 'synset': 'applesauce.n.01', 'synonyms': ['applesauce'], 'def': 'puree of stewed apples usually sweetened and spiced', 'name': 'applesauce'}, {'frequency': 'r', 'id': 16, 'synset': 'apricot.n.02', 'synonyms': ['apricot'], 'def': 'downy yellow to rosy-colored fruit resembling a small peach', 'name': 'apricot'}, {'frequency': 'f', 'id': 17, 'synset': 'apron.n.01', 'synonyms': ['apron'], 'def': 'a garment of cloth that is tied about the waist and worn to protect clothing', 'name': 'apron'}, {'frequency': 'c', 'id': 18, 'synset': 'aquarium.n.01', 'synonyms': ['aquarium', 'fish_tank'], 'def': 'a tank/pool/bowl filled with water for keeping live fish and underwater animals', 'name': 'aquarium'}, {'frequency': 'c', 'id': 19, 'synset': 'armband.n.02', 'synonyms': ['armband'], 'def': 'a band worn around the upper arm', 'name': 'armband'}, {'frequency': 'f', 'id': 20, 'synset': 'armchair.n.01', 'synonyms': ['armchair'], 'def': 'chair with a support on each side for arms', 'name': 'armchair'}, {'frequency': 'r', 'id': 21, 'synset': 'armoire.n.01', 'synonyms': ['armoire'], 'def': 'a large 
wardrobe or cabinet', 'name': 'armoire'}, {'frequency': 'r', 'id': 22, 'synset': 'armor.n.01', 'synonyms': ['armor', 'armour'], 'def': 'protective covering made of metal and used in combat', 'name': 'armor'}, {'frequency': 'c', 'id': 23, 'synset': 'artichoke.n.02', 'synonyms': ['artichoke'], 'def': 'a thistlelike flower head with edible fleshy leaves and heart', 'name': 'artichoke'}, {'frequency': 'f', 'id': 24, 'synset': 'ashcan.n.01', 'synonyms': ['trash_can', 'garbage_can', 'wastebin', 'dustbin', 'trash_barrel', 'trash_bin'], 'def': 'a bin that holds rubbish until it is collected', 'name': 'trash_can'}, {'frequency': 'c', 'id': 25, 'synset': 'ashtray.n.01', 'synonyms': ['ashtray'], 'def': "a receptacle for the ash from smokers' cigars or cigarettes", 'name': 'ashtray'}, {'frequency': 'c', 'id': 26, 'synset': 'asparagus.n.02', 'synonyms': ['asparagus'], 'def': 'edible young shoots of the asparagus plant', 'name': 'asparagus'}, {'frequency': 'c', 'id': 27, 'synset': 'atomizer.n.01', 'synonyms': ['atomizer', 'atomiser', 'spray', 'sprayer', 'nebulizer', 'nebuliser'], 'def': 'a dispenser that turns a liquid (such as perfume) into a fine mist', 'name': 'atomizer'}, {'frequency': 'c', 'id': 28, 'synset': 'avocado.n.01', 'synonyms': ['avocado'], 'def': 'a pear-shaped fruit with green or blackish skin and rich yellowish pulp enclosing a single large seed', 'name': 'avocado'}, {'frequency': 'c', 'id': 29, 'synset': 'award.n.02', 'synonyms': ['award', 'accolade'], 'def': 'a tangible symbol signifying approval or distinction', 'name': 'award'}, {'frequency': 'f', 'id': 30, 'synset': 'awning.n.01', 'synonyms': ['awning'], 'def': 'a canopy made of canvas to shelter people or things from rain or sun', 'name': 'awning'}, {'frequency': 'r', 'id': 31, 'synset': 'ax.n.01', 'synonyms': ['ax', 'axe'], 'def': 'an edge tool with a heavy bladed head mounted across a handle', 'name': 'ax'}, {'frequency': 'f', 'id': 32, 'synset': 'baby_buggy.n.01', 'synonyms': ['baby_buggy', 'baby_carriage', 'perambulator', 'pram', 'stroller'], 'def': 'a small vehicle with four wheels in which a baby or child is pushed around', 'name': 'baby_buggy'}, {'frequency': 'c', 'id': 33, 'synset': 'backboard.n.01', 'synonyms': ['basketball_backboard'], 'def': 'a raised vertical board with basket attached; used to play basketball', 'name': 'basketball_backboard'}, {'frequency': 'f', 'id': 34, 'synset': 'backpack.n.01', 'synonyms': ['backpack', 'knapsack', 'packsack', 'rucksack', 'haversack'], 'def': 'a bag carried by a strap on your back or shoulder', 'name': 'backpack'}, {'frequency': 'f', 'id': 35, 'synset': 'bag.n.04', 'synonyms': ['handbag', 'purse', 'pocketbook'], 'def': 'a container used for carrying money and small personal items or accessories', 'name': 'handbag'}, {'frequency': 'f', 'id': 36, 'synset': 'bag.n.06', 'synonyms': ['suitcase', 'baggage', 'luggage'], 'def': 'cases used to carry belongings when traveling', 'name': 'suitcase'}, {'frequency': 'c', 'id': 37, 'synset': 'bagel.n.01', 'synonyms': ['bagel', 'beigel'], 'def': 'glazed yeast-raised doughnut-shaped roll with hard crust', 'name': 'bagel'}, {'frequency': 'r', 'id': 38, 'synset': 'bagpipe.n.01', 'synonyms': ['bagpipe'], 'def': 'a tubular wind instrument; the player blows air into a bag and squeezes it out', 'name': 'bagpipe'}, {'frequency': 'r', 'id': 39, 'synset': 'baguet.n.01', 'synonyms': ['baguet', 'baguette'], 'def': 'narrow French stick loaf', 'name': 'baguet'}, {'frequency': 'r', 'id': 40, 'synset': 'bait.n.02', 'synonyms': ['bait', 'lure'], 'def': 'something 
used to lure fish or other animals into danger so they can be trapped or killed', 'name': 'bait'}, {'frequency': 'f', 'id': 41, 'synset': 'ball.n.06', 'synonyms': ['ball'], 'def': 'a spherical object used as a plaything', 'name': 'ball'}, {'frequency': 'r', 'id': 42, 'synset': 'ballet_skirt.n.01', 'synonyms': ['ballet_skirt', 'tutu'], 'def': 'very short skirt worn by ballerinas', 'name': 'ballet_skirt'}, {'frequency': 'f', 'id': 43, 'synset': 'balloon.n.01', 'synonyms': ['balloon'], 'def': 'large tough nonrigid bag filled with gas or heated air', 'name': 'balloon'}, {'frequency': 'c', 'id': 44, 'synset': 'bamboo.n.02', 'synonyms': ['bamboo'], 'def': 'woody tropical grass having hollow woody stems', 'name': 'bamboo'}, {'frequency': 'f', 'id': 45, 'synset': 'banana.n.02', 'synonyms': ['banana'], 'def': 'elongated crescent-shaped yellow fruit with soft sweet flesh', 'name': 'banana'}, {'frequency': 'r', 'id': 46, 'synset': 'band_aid.n.01', 'synonyms': ['Band_Aid'], 'def': 'trade name for an adhesive bandage to cover small cuts or blisters', 'name': 'Band_Aid'}, {'frequency': 'c', 'id': 47, 'synset': 'bandage.n.01', 'synonyms': ['bandage'], 'def': 'a piece of soft material that covers and protects an injured part of the body', 'name': 'bandage'}, {'frequency': 'c', 'id': 48, 'synset': 'bandanna.n.01', 'synonyms': ['bandanna', 'bandana'], 'def': 'large and brightly colored handkerchief; often used as a neckerchief', 'name': 'bandanna'}, {'frequency': 'r', 'id': 49, 'synset': 'banjo.n.01', 'synonyms': ['banjo'], 'def': 'a stringed instrument of the guitar family with a long neck and circular body', 'name': 'banjo'}, {'frequency': 'f', 'id': 50, 'synset': 'banner.n.01', 'synonyms': ['banner', 'streamer'], 'def': 'long strip of cloth or paper used for decoration or advertising', 'name': 'banner'}, {'frequency': 'r', 'id': 51, 'synset': 'barbell.n.01', 'synonyms': ['barbell'], 'def': 'a bar to which heavy discs are attached at each end; used in weightlifting', 'name': 'barbell'}, {'frequency': 'r', 'id': 52, 'synset': 'barge.n.01', 'synonyms': ['barge'], 'def': 'a flatbottom boat for carrying heavy loads (especially on canals)', 'name': 'barge'}, {'frequency': 'f', 'id': 53, 'synset': 'barrel.n.02', 'synonyms': ['barrel', 'cask'], 'def': 'a cylindrical container that holds liquids', 'name': 'barrel'}, {'frequency': 'c', 'id': 54, 'synset': 'barrette.n.01', 'synonyms': ['barrette'], 'def': "a pin for holding women's hair in place", 'name': 'barrette'}, {'frequency': 'c', 'id': 55, 'synset': 'barrow.n.03', 'synonyms': ['barrow', 'garden_cart', 'lawn_cart', 'wheelbarrow'], 'def': 'a cart for carrying small loads; has handles and one or more wheels', 'name': 'barrow'}, {'frequency': 'f', 'id': 56, 'synset': 'base.n.03', 'synonyms': ['baseball_base'], 'def': 'a place that the runner must touch before scoring', 'name': 'baseball_base'}, {'frequency': 'f', 'id': 57, 'synset': 'baseball.n.02', 'synonyms': ['baseball'], 'def': 'a ball used in playing baseball', 'name': 'baseball'}, {'frequency': 'f', 'id': 58, 'synset': 'baseball_bat.n.01', 'synonyms': ['baseball_bat'], 'def': 'an implement used in baseball by the batter', 'name': 'baseball_bat'}, {'frequency': 'f', 'id': 59, 'synset': 'baseball_cap.n.01', 'synonyms': ['baseball_cap', 'jockey_cap', 'golf_cap'], 'def': 'a cap with a bill', 'name': 'baseball_cap'}, {'frequency': 'f', 'id': 60, 'synset': 'baseball_glove.n.01', 'synonyms': ['baseball_glove', 'baseball_mitt'], 'def': 'the handwear used by fielders in playing baseball', 'name': 'baseball_glove'}, 
{'frequency': 'f', 'id': 61, 'synset': 'basket.n.01', 'synonyms': ['basket', 'handbasket'], 'def': 'a container that is usually woven and has handles', 'name': 'basket'}, {'frequency': 'c', 'id': 62, 'synset': 'basket.n.03', 'synonyms': ['basketball_hoop'], 'def': 'metal hoop supporting a net through which players try to throw the basketball', 'name': 'basketball_hoop'}, {'frequency': 'c', 'id': 63, 'synset': 'basketball.n.02', 'synonyms': ['basketball'], 'def': 'an inflated ball used in playing basketball', 'name': 'basketball'}, {'frequency': 'r', 'id': 64, 'synset': 'bass_horn.n.01', 'synonyms': ['bass_horn', 'sousaphone', 'tuba'], 'def': 'the lowest brass wind instrument', 'name': 'bass_horn'}, {'frequency': 'r', 'id': 65, 'synset': 'bat.n.01', 'synonyms': ['bat_(animal)'], 'def': 'nocturnal mouselike mammal with forelimbs modified to form membranous wings', 'name': 'bat_(animal)'}, {'frequency': 'f', 'id': 66, 'synset': 'bath_mat.n.01', 'synonyms': ['bath_mat'], 'def': 'a heavy towel or mat to stand on while drying yourself after a bath', 'name': 'bath_mat'}, {'frequency': 'f', 'id': 67, 'synset': 'bath_towel.n.01', 'synonyms': ['bath_towel'], 'def': 'a large towel; to dry yourself after a bath', 'name': 'bath_towel'}, {'frequency': 'c', 'id': 68, 'synset': 'bathrobe.n.01', 'synonyms': ['bathrobe'], 'def': 'a loose-fitting robe of towelling; worn after a bath or swim', 'name': 'bathrobe'}, {'frequency': 'f', 'id': 69, 'synset': 'bathtub.n.01', 'synonyms': ['bathtub', 'bathing_tub'], 'def': 'a large open container that you fill with water and use to wash the body', 'name': 'bathtub'}, {'frequency': 'r', 'id': 70, 'synset': 'batter.n.02', 'synonyms': ['batter_(food)'], 'def': 'a liquid or semiliquid mixture, as of flour, eggs, and milk, used in cooking', 'name': 'batter_(food)'}, {'frequency': 'c', 'id': 71, 'synset': 'battery.n.02', 'synonyms': ['battery'], 'def': 'a portable device that produces electricity', 'name': 'battery'}, {'frequency': 'r', 'id': 72, 'synset': 'beach_ball.n.01', 'synonyms': ['beachball'], 'def': 'large and light ball; for play at the seaside', 'name': 'beachball'}, {'frequency': 'c', 'id': 73, 'synset': 'bead.n.01', 'synonyms': ['bead'], 'def': 'a small ball with a hole through the middle used for ornamentation, jewellery, etc.', 'name': 'bead'}, {'frequency': 'r', 'id': 74, 'synset': 'beaker.n.01', 'synonyms': ['beaker'], 'def': 'a flatbottomed jar made of glass or plastic; used for chemistry', 'name': 'beaker'}, {'frequency': 'c', 'id': 75, 'synset': 'bean_curd.n.01', 'synonyms': ['bean_curd', 'tofu'], 'def': 'cheeselike food made of curdled soybean milk', 'name': 'bean_curd'}, {'frequency': 'c', 'id': 76, 'synset': 'beanbag.n.01', 'synonyms': ['beanbag'], 'def': 'a bag filled with dried beans or similar items; used in games or to sit on', 'name': 'beanbag'}, {'frequency': 'f', 'id': 77, 'synset': 'beanie.n.01', 'synonyms': ['beanie', 'beany'], 'def': 'a small skullcap; formerly worn by schoolboys and college freshmen', 'name': 'beanie'}, {'frequency': 'f', 'id': 78, 'synset': 'bear.n.01', 'synonyms': ['bear'], 'def': 'large carnivorous or omnivorous mammals with shaggy coats and claws', 'name': 'bear'}, {'frequency': 'f', 'id': 79, 'synset': 'bed.n.01', 'synonyms': ['bed'], 'def': 'a piece of furniture that provides a place to sleep', 'name': 'bed'}, {'frequency': 'c', 'id': 80, 'synset': 'bedspread.n.01', 'synonyms': ['bedspread', 'bedcover', 'bed_covering', 'counterpane', 'spread'], 'def': 'decorative cover for a bed', 'name': 'bedspread'}, {'frequency': 
'f', 'id': 81, 'synset': 'beef.n.01', 'synonyms': ['cow'], 'def': 'cattle that are reared for their meat', 'name': 'cow'}, {'frequency': 'c', 'id': 82, 'synset': 'beef.n.02', 'synonyms': ['beef_(food)', 'boeuf_(food)'], 'def': 'meat from an adult domestic bovine', 'name': 'beef_(food)'}, {'frequency': 'r', 'id': 83, 'synset': 'beeper.n.01', 'synonyms': ['beeper', 'pager'], 'def': 'a device that beeps when the person carrying it is being paged', 'name': 'beeper'}, {'frequency': 'f', 'id': 84, 'synset': 'beer_bottle.n.01', 'synonyms': ['beer_bottle'], 'def': 'a bottle that holds beer', 'name': 'beer_bottle'}, {'frequency': 'c', 'id': 85, 'synset': 'beer_can.n.01', 'synonyms': ['beer_can'], 'def': 'a can that holds beer', 'name': 'beer_can'}, {'frequency': 'r', 'id': 86, 'synset': 'beetle.n.01', 'synonyms': ['beetle'], 'def': 'insect with hard wing covers', 'name': 'beetle'}, {'frequency': 'f', 'id': 87, 'synset': 'bell.n.01', 'synonyms': ['bell'], 'def': 'a hollow device made of metal that makes a ringing sound when struck', 'name': 'bell'}, {'frequency': 'f', 'id': 88, 'synset': 'bell_pepper.n.02', 'synonyms': ['bell_pepper', 'capsicum'], 'def': 'large bell-shaped sweet pepper in green or red or yellow or orange or black varieties', 'name': 'bell_pepper'}, {'frequency': 'f', 'id': 89, 'synset': 'belt.n.02', 'synonyms': ['belt'], 'def': 'a band to tie or buckle around the body (usually at the waist)', 'name': 'belt'}, {'frequency': 'f', 'id': 90, 'synset': 'belt_buckle.n.01', 'synonyms': ['belt_buckle'], 'def': 'the buckle used to fasten a belt', 'name': 'belt_buckle'}, {'frequency': 'f', 'id': 91, 'synset': 'bench.n.01', 'synonyms': ['bench'], 'def': 'a long seat for more than one person', 'name': 'bench'}, {'frequency': 'c', 'id': 92, 'synset': 'beret.n.01', 'synonyms': ['beret'], 'def': 'a cap with no brim or bill; made of soft cloth', 'name': 'beret'}, {'frequency': 'c', 'id': 93, 'synset': 'bib.n.02', 'synonyms': ['bib'], 'def': 'a napkin tied under the chin of a child while eating', 'name': 'bib'}, {'frequency': 'r', 'id': 94, 'synset': 'bible.n.01', 'synonyms': ['Bible'], 'def': 'the sacred writings of the Christian religions', 'name': 'Bible'}, {'frequency': 'f', 'id': 95, 'synset': 'bicycle.n.01', 'synonyms': ['bicycle', 'bike_(bicycle)'], 'def': 'a wheeled vehicle that has two wheels and is moved by foot pedals', 'name': 'bicycle'}, {'frequency': 'f', 'id': 96, 'synset': 'bill.n.09', 'synonyms': ['visor', 'vizor'], 'def': 'a brim that projects to the front to shade the eyes', 'name': 'visor'}, {'frequency': 'c', 'id': 97, 'synset': 'binder.n.03', 'synonyms': ['binder', 'ring-binder'], 'def': 'holds loose papers or magazines', 'name': 'binder'}, {'frequency': 'c', 'id': 98, 'synset': 'binoculars.n.01', 'synonyms': ['binoculars', 'field_glasses', 'opera_glasses'], 'def': 'an optical instrument designed for simultaneous use by both eyes', 'name': 'binoculars'}, {'frequency': 'f', 'id': 99, 'synset': 'bird.n.01', 'synonyms': ['bird'], 'def': 'animal characterized by feathers and wings', 'name': 'bird'}, {'frequency': 'r', 'id': 100, 'synset': 'bird_feeder.n.01', 'synonyms': ['birdfeeder'], 'def': 'an outdoor device that supplies food for wild birds', 'name': 'birdfeeder'}, {'frequency': 'r', 'id': 101, 'synset': 'birdbath.n.01', 'synonyms': ['birdbath'], 'def': 'an ornamental basin (usually in a garden) for birds to bathe in', 'name': 'birdbath'}, {'frequency': 'c', 'id': 102, 'synset': 'birdcage.n.01', 'synonyms': ['birdcage'], 'def': 'a cage in which a bird can be kept', 'name': 
'birdcage'}, {'frequency': 'c', 'id': 103, 'synset': 'birdhouse.n.01', 'synonyms': ['birdhouse'], 'def': 'a shelter for birds', 'name': 'birdhouse'}, {'frequency': 'f', 'id': 104, 'synset': 'birthday_cake.n.01', 'synonyms': ['birthday_cake'], 'def': 'decorated cake served at a birthday party', 'name': 'birthday_cake'}, {'frequency': 'r', 'id': 105, 'synset': 'birthday_card.n.01', 'synonyms': ['birthday_card'], 'def': 'a card expressing a birthday greeting', 'name': 'birthday_card'}, {'frequency': 'r', 'id': 106, 'synset': 'biscuit.n.01', 'synonyms': ['biscuit_(bread)'], 'def': 'small round bread leavened with baking-powder or soda', 'name': 'biscuit_(bread)'}, {'frequency': 'r', 'id': 107, 'synset': 'black_flag.n.01', 'synonyms': ['pirate_flag'], 'def': 'a flag usually bearing a white skull and crossbones on a black background', 'name': 'pirate_flag'}, {'frequency': 'c', 'id': 108, 'synset': 'black_sheep.n.02', 'synonyms': ['black_sheep'], 'def': 'sheep with a black coat', 'name': 'black_sheep'}, {'frequency': 'c', 'id': 109, 'synset': 'blackboard.n.01', 'synonyms': ['blackboard', 'chalkboard'], 'def': 'sheet of slate; for writing with chalk', 'name': 'blackboard'}, {'frequency': 'f', 'id': 110, 'synset': 'blanket.n.01', 'synonyms': ['blanket'], 'def': 'bedding that keeps a person warm in bed', 'name': 'blanket'}, {'frequency': 'c', 'id': 111, 'synset': 'blazer.n.01', 'synonyms': ['blazer', 'sport_jacket', 'sport_coat', 'sports_jacket', 'sports_coat'], 'def': 'lightweight jacket; often striped in the colors of a club or school', 'name': 'blazer'}, {'frequency': 'f', 'id': 112, 'synset': 'blender.n.01', 'synonyms': ['blender', 'liquidizer', 'liquidiser'], 'def': 'an electrically powered mixer that mixes or chops or liquefies foods', 'name': 'blender'}, {'frequency': 'r', 'id': 113, 'synset': 'blimp.n.02', 'synonyms': ['blimp'], 'def': 'a small nonrigid airship used for observation or as a barrage balloon', 'name': 'blimp'}, {'frequency': 'c', 'id': 114, 'synset': 'blinker.n.01', 'synonyms': ['blinker', 'flasher'], 'def': 'a light that flashes on and off; used as a signal or to send messages', 'name': 'blinker'}, {'frequency': 'c', 'id': 115, 'synset': 'blueberry.n.02', 'synonyms': ['blueberry'], 'def': 'sweet edible dark-blue berries of blueberry plants', 'name': 'blueberry'}, {'frequency': 'r', 'id': 116, 'synset': 'boar.n.02', 'synonyms': ['boar'], 'def': 'an uncastrated male hog', 'name': 'boar'}, {'frequency': 'r', 'id': 117, 'synset': 'board.n.09', 'synonyms': ['gameboard'], 'def': 'a flat portable surface (usually rectangular) designed for board games', 'name': 'gameboard'}, {'frequency': 'f', 'id': 118, 'synset': 'boat.n.01', 'synonyms': ['boat', 'ship_(boat)'], 'def': 'a vessel for travel on water', 'name': 'boat'}, {'frequency': 'c', 'id': 119, 'synset': 'bobbin.n.01', 'synonyms': ['bobbin', 'spool', 'reel'], 'def': 'a thing around which thread/tape/film or other flexible materials can be wound', 'name': 'bobbin'}, {'frequency': 'r', 'id': 120, 'synset': 'bobby_pin.n.01', 'synonyms': ['bobby_pin', 'hairgrip'], 'def': 'a flat wire hairpin used to hold bobbed hair in place', 'name': 'bobby_pin'}, {'frequency': 'c', 'id': 121, 'synset': 'boiled_egg.n.01', 'synonyms': ['boiled_egg', 'coddled_egg'], 'def': 'egg cooked briefly in the shell in gently boiling water', 'name': 'boiled_egg'}, {'frequency': 'r', 'id': 122, 'synset': 'bolo_tie.n.01', 'synonyms': ['bolo_tie', 'bolo', 'bola_tie', 'bola'], 'def': 'a cord fastened around the neck with an ornamental clasp and worn as a necktie', 'name': 
'bolo_tie'}, {'frequency': 'c', 'id': 123, 'synset': 'bolt.n.03', 'synonyms': ['deadbolt'], 'def': 'the part of a lock that is engaged or withdrawn with a key', 'name': 'deadbolt'}, {'frequency': 'f', 'id': 124, 'synset': 'bolt.n.06', 'synonyms': ['bolt'], 'def': 'a screw that screws into a nut to form a fastener', 'name': 'bolt'}, {'frequency': 'r', 'id': 125, 'synset': 'bonnet.n.01', 'synonyms': ['bonnet'], 'def': 'a hat tied under the chin', 'name': 'bonnet'}, {'frequency': 'f', 'id': 126, 'synset': 'book.n.01', 'synonyms': ['book'], 'def': 'a written work or composition that has been published', 'name': 'book'}, {'frequency': 'r', 'id': 127, 'synset': 'book_bag.n.01', 'synonyms': ['book_bag'], 'def': 'a bag in which students carry their books', 'name': 'book_bag'}, {'frequency': 'c', 'id': 128, 'synset': 'bookcase.n.01', 'synonyms': ['bookcase'], 'def': 'a piece of furniture with shelves for storing books', 'name': 'bookcase'}, {'frequency': 'c', 'id': 129, 'synset': 'booklet.n.01', 'synonyms': ['booklet', 'brochure', 'leaflet', 'pamphlet'], 'def': 'a small book usually having a paper cover', 'name': 'booklet'}, {'frequency': 'r', 'id': 130, 'synset': 'bookmark.n.01', 'synonyms': ['bookmark', 'bookmarker'], 'def': 'a marker (a piece of paper or ribbon) placed between the pages of a book', 'name': 'bookmark'}, {'frequency': 'r', 'id': 131, 'synset': 'boom.n.04', 'synonyms': ['boom_microphone', 'microphone_boom'], 'def': 'a pole carrying an overhead microphone projected over a film or tv set', 'name': 'boom_microphone'}, {'frequency': 'f', 'id': 132, 'synset': 'boot.n.01', 'synonyms': ['boot'], 'def': 'footwear that covers the whole foot and lower leg', 'name': 'boot'}, {'frequency': 'f', 'id': 133, 'synset': 'bottle.n.01', 'synonyms': ['bottle'], 'def': 'a glass or plastic vessel used for storing drinks or other liquids', 'name': 'bottle'}, {'frequency': 'c', 'id': 134, 'synset': 'bottle_opener.n.01', 'synonyms': ['bottle_opener'], 'def': 'an opener for removing caps or corks from bottles', 'name': 'bottle_opener'}, {'frequency': 'c', 'id': 135, 'synset': 'bouquet.n.01', 'synonyms': ['bouquet'], 'def': 'an arrangement of flowers that is usually given as a present', 'name': 'bouquet'}, {'frequency': 'r', 'id': 136, 'synset': 'bow.n.04', 'synonyms': ['bow_(weapon)'], 'def': 'a weapon for shooting arrows', 'name': 'bow_(weapon)'}, {'frequency': 'f', 'id': 137, 'synset': 'bow.n.08', 'synonyms': ['bow_(decorative_ribbons)'], 'def': 'a decorative interlacing of ribbons', 'name': 'bow_(decorative_ribbons)'}, {'frequency': 'f', 'id': 138, 'synset': 'bow_tie.n.01', 'synonyms': ['bow-tie', 'bowtie'], 'def': "a man's tie that ties in a bow", 'name': 'bow-tie'}, {'frequency': 'f', 'id': 139, 'synset': 'bowl.n.03', 'synonyms': ['bowl'], 'def': 'a dish that is round and open at the top for serving foods', 'name': 'bowl'}, {'frequency': 'r', 'id': 140, 'synset': 'bowl.n.08', 'synonyms': ['pipe_bowl'], 'def': 'a small round container that is open at the top for holding tobacco', 'name': 'pipe_bowl'}, {'frequency': 'c', 'id': 141, 'synset': 'bowler_hat.n.01', 'synonyms': ['bowler_hat', 'bowler', 'derby_hat', 'derby', 'plug_hat'], 'def': 'a felt hat that is round and hard with a narrow brim', 'name': 'bowler_hat'}, {'frequency': 'r', 'id': 142, 'synset': 'bowling_ball.n.01', 'synonyms': ['bowling_ball'], 'def': 'a large ball with finger holes used in the sport of bowling', 'name': 'bowling_ball'}, {'frequency': 'r', 'id': 143, 'synset': 'bowling_pin.n.01', 'synonyms': ['bowling_pin'], 'def': 'a 
club-shaped wooden object used in bowling', 'name': 'bowling_pin'}, {'frequency': 'r', 'id': 144, 'synset': 'boxing_glove.n.01', 'synonyms': ['boxing_glove'], 'def': 'large glove covering the fists of a fighter worn for the sport of boxing', 'name': 'boxing_glove'}, {'frequency': 'c', 'id': 145, 'synset': 'brace.n.06', 'synonyms': ['suspenders'], 'def': 'elastic straps that hold trousers up (usually used in the plural)', 'name': 'suspenders'}, {'frequency': 'f', 'id': 146, 'synset': 'bracelet.n.02', 'synonyms': ['bracelet', 'bangle'], 'def': 'jewelry worn around the wrist for decoration', 'name': 'bracelet'}, {'frequency': 'r', 'id': 147, 'synset': 'brass.n.07', 'synonyms': ['brass_plaque'], 'def': 'a memorial made of brass', 'name': 'brass_plaque'}, {'frequency': 'c', 'id': 148, 'synset': 'brassiere.n.01', 'synonyms': ['brassiere', 'bra', 'bandeau'], 'def': 'an undergarment worn by women to support their breasts', 'name': 'brassiere'}, {'frequency': 'c', 'id': 149, 'synset': 'bread-bin.n.01', 'synonyms': ['bread-bin', 'breadbox'], 'def': 'a container used to keep bread or cake in', 'name': 'bread-bin'}, {'frequency': 'r', 'id': 150, 'synset': 'breechcloth.n.01', 'synonyms': ['breechcloth', 'breechclout', 'loincloth'], 'def': 'a garment that provides covering for the loins', 'name': 'breechcloth'}, {'frequency': 'c', 'id': 151, 'synset': 'bridal_gown.n.01', 'synonyms': ['bridal_gown', 'wedding_gown', 'wedding_dress'], 'def': 'a gown worn by the bride at a wedding', 'name': 'bridal_gown'}, {'frequency': 'c', 'id': 152, 'synset': 'briefcase.n.01', 'synonyms': ['briefcase'], 'def': 'a case with a handle; for carrying papers or files or books', 'name': 'briefcase'}, {'frequency': 'c', 'id': 153, 'synset': 'bristle_brush.n.01', 'synonyms': ['bristle_brush'], 'def': 'a brush that is made with the short stiff hairs of an animal or plant', 'name': 'bristle_brush'}, {'frequency': 'f', 'id': 154, 'synset': 'broccoli.n.01', 'synonyms': ['broccoli'], 'def': 'plant with dense clusters of tight green flower buds', 'name': 'broccoli'}, {'frequency': 'r', 'id': 155, 'synset': 'brooch.n.01', 'synonyms': ['broach'], 'def': 'a decorative pin worn by women', 'name': 'broach'}, {'frequency': 'c', 'id': 156, 'synset': 'broom.n.01', 'synonyms': ['broom'], 'def': 'bundle of straws or twigs attached to a long handle; used for cleaning', 'name': 'broom'}, {'frequency': 'c', 'id': 157, 'synset': 'brownie.n.03', 'synonyms': ['brownie'], 'def': 'square or bar of very rich chocolate cake usually with nuts', 'name': 'brownie'}, {'frequency': 'c', 'id': 158, 'synset': 'brussels_sprouts.n.01', 'synonyms': ['brussels_sprouts'], 'def': 'the small edible cabbage-like buds growing along a stalk', 'name': 'brussels_sprouts'}, {'frequency': 'r', 'id': 159, 'synset': 'bubble_gum.n.01', 'synonyms': ['bubble_gum'], 'def': 'a kind of chewing gum that can be blown into bubbles', 'name': 'bubble_gum'}, {'frequency': 'f', 'id': 160, 'synset': 'bucket.n.01', 'synonyms': ['bucket', 'pail'], 'def': 'a roughly cylindrical vessel that is open at the top', 'name': 'bucket'}, {'frequency': 'r', 'id': 161, 'synset': 'buggy.n.01', 'synonyms': ['horse_buggy'], 'def': 'a small lightweight carriage; drawn by a single horse', 'name': 'horse_buggy'}, {'frequency': 'c', 'id': 162, 'synset': 'bull.n.11', 'synonyms': ['bull'], 'def': 'mature male cow', 'name': 'bull'}, {'frequency': 'r', 'id': 163, 'synset': 'bulldog.n.01', 'synonyms': ['bulldog'], 'def': 'a thickset short-haired dog with a large head and strong undershot lower jaw', 'name': 
'bulldog'}, {'frequency': 'r', 'id': 164, 'synset': 'bulldozer.n.01', 'synonyms': ['bulldozer', 'dozer'], 'def': 'large powerful tractor; a large blade in front flattens areas of ground', 'name': 'bulldozer'}, {'frequency': 'c', 'id': 165, 'synset': 'bullet_train.n.01', 'synonyms': ['bullet_train'], 'def': 'a high-speed passenger train', 'name': 'bullet_train'}, {'frequency': 'c', 'id': 166, 'synset': 'bulletin_board.n.02', 'synonyms': ['bulletin_board', 'notice_board'], 'def': 'a board that hangs on a wall; displays announcements', 'name': 'bulletin_board'}, {'frequency': 'r', 'id': 167, 'synset': 'bulletproof_vest.n.01', 'synonyms': ['bulletproof_vest'], 'def': 'a vest capable of resisting the impact of a bullet', 'name': 'bulletproof_vest'}, {'frequency': 'c', 'id': 168, 'synset': 'bullhorn.n.01', 'synonyms': ['bullhorn', 'megaphone'], 'def': 'a portable loudspeaker with built-in microphone and amplifier', 'name': 'bullhorn'}, {'frequency': 'r', 'id': 169, 'synset': 'bully_beef.n.01', 'synonyms': ['corned_beef', 'corn_beef'], 'def': 'beef cured or pickled in brine', 'name': 'corned_beef'}, {'frequency': 'f', 'id': 170, 'synset': 'bun.n.01', 'synonyms': ['bun', 'roll'], 'def': 'small rounded bread either plain or sweet', 'name': 'bun'}, {'frequency': 'c', 'id': 171, 'synset': 'bunk_bed.n.01', 'synonyms': ['bunk_bed'], 'def': 'beds built one above the other', 'name': 'bunk_bed'}, {'frequency': 'f', 'id': 172, 'synset': 'buoy.n.01', 'synonyms': ['buoy'], 'def': 'a float attached by rope to the seabed to mark channels in a harbor or underwater hazards', 'name': 'buoy'}, {'frequency': 'r', 'id': 173, 'synset': 'burrito.n.01', 'synonyms': ['burrito'], 'def': 'a flour tortilla folded around a filling', 'name': 'burrito'}, {'frequency': 'f', 'id': 174, 'synset': 'bus.n.01', 'synonyms': ['bus_(vehicle)', 'autobus', 'charabanc', 'double-decker', 'motorbus', 'motorcoach'], 'def': 'a vehicle carrying many passengers; used for public transport', 'name': 'bus_(vehicle)'}, {'frequency': 'c', 'id': 175, 'synset': 'business_card.n.01', 'synonyms': ['business_card'], 'def': "a card on which are printed the person's name and business affiliation", 'name': 'business_card'}, {'frequency': 'c', 'id': 176, 'synset': 'butcher_knife.n.01', 'synonyms': ['butcher_knife'], 'def': 'a large sharp knife for cutting or trimming meat', 'name': 'butcher_knife'}, {'frequency': 'c', 'id': 177, 'synset': 'butter.n.01', 'synonyms': ['butter'], 'def': 'an edible emulsion of fat globules made by churning milk or cream; for cooking and table use', 'name': 'butter'}, {'frequency': 'c', 'id': 178, 'synset': 'butterfly.n.01', 'synonyms': ['butterfly'], 'def': 'insect typically having a slender body with knobbed antennae and broad colorful wings', 'name': 'butterfly'}, {'frequency': 'f', 'id': 179, 'synset': 'button.n.01', 'synonyms': ['button'], 'def': 'a round fastener sewn to shirts and coats etc to fit through buttonholes', 'name': 'button'}, {'frequency': 'f', 'id': 180, 'synset': 'cab.n.03', 'synonyms': ['cab_(taxi)', 'taxi', 'taxicab'], 'def': 'a car that takes passengers where they want to go in exchange for money', 'name': 'cab_(taxi)'}, {'frequency': 'r', 'id': 181, 'synset': 'cabana.n.01', 'synonyms': ['cabana'], 'def': 'a small tent used as a dressing room beside the sea or a swimming pool', 'name': 'cabana'}, {'frequency': 'r', 'id': 182, 'synset': 'cabin_car.n.01', 'synonyms': ['cabin_car', 'caboose'], 'def': 'a car on a freight train for use of the train crew; usually the last car on the train', 'name': 
'cabin_car'}, {'frequency': 'f', 'id': 183, 'synset': 'cabinet.n.01', 'synonyms': ['cabinet'], 'def': 'a piece of furniture resembling a cupboard with doors and shelves and drawers', 'name': 'cabinet'}, {'frequency': 'r', 'id': 184, 'synset': 'cabinet.n.03', 'synonyms': ['locker', 'storage_locker'], 'def': 'a storage compartment for clothes and valuables; usually it has a lock', 'name': 'locker'}, {'frequency': 'f', 'id': 185, 'synset': 'cake.n.03', 'synonyms': ['cake'], 'def': 'baked goods made from or based on a mixture of flour, sugar, eggs, and fat', 'name': 'cake'}, {'frequency': 'c', 'id': 186, 'synset': 'calculator.n.02', 'synonyms': ['calculator'], 'def': 'a small machine that is used for mathematical calculations', 'name': 'calculator'}, {'frequency': 'f', 'id': 187, 'synset': 'calendar.n.02', 'synonyms': ['calendar'], 'def': 'a list or register of events (appointments/social events/court cases, etc)', 'name': 'calendar'}, {'frequency': 'c', 'id': 188, 'synset': 'calf.n.01', 'synonyms': ['calf'], 'def': 'young of domestic cattle', 'name': 'calf'}, {'frequency': 'c', 'id': 189, 'synset': 'camcorder.n.01', 'synonyms': ['camcorder'], 'def': 'a portable television camera and videocassette recorder', 'name': 'camcorder'}, {'frequency': 'c', 'id': 190, 'synset': 'camel.n.01', 'synonyms': ['camel'], 'def': 'cud-chewing mammal used as a draft or saddle animal in desert regions', 'name': 'camel'}, {'frequency': 'f', 'id': 191, 'synset': 'camera.n.01', 'synonyms': ['camera'], 'def': 'equipment for taking photographs', 'name': 'camera'}, {'frequency': 'c', 'id': 192, 'synset': 'camera_lens.n.01', 'synonyms': ['camera_lens'], 'def': 'a lens that focuses the image in a camera', 'name': 'camera_lens'}, {'frequency': 'c', 'id': 193, 'synset': 'camper.n.02', 'synonyms': ['camper_(vehicle)', 'camping_bus', 'motor_home'], 'def': 'a recreational vehicle equipped for camping out while traveling', 'name': 'camper_(vehicle)'}, {'frequency': 'f', 'id': 194, 'synset': 'can.n.01', 'synonyms': ['can', 'tin_can'], 'def': 'airtight sealed metal container for food or drink or paint etc.', 'name': 'can'}, {'frequency': 'c', 'id': 195, 'synset': 'can_opener.n.01', 'synonyms': ['can_opener', 'tin_opener'], 'def': 'a device for cutting cans open', 'name': 'can_opener'}, {'frequency': 'r', 'id': 196, 'synset': 'candelabrum.n.01', 'synonyms': ['candelabrum', 'candelabra'], 'def': 'branched candlestick; ornamental; has several lights', 'name': 'candelabrum'}, {'frequency': 'f', 'id': 197, 'synset': 'candle.n.01', 'synonyms': ['candle', 'candlestick'], 'def': 'stick of wax with a wick in the middle', 'name': 'candle'}, {'frequency': 'f', 'id': 198, 'synset': 'candlestick.n.01', 'synonyms': ['candle_holder'], 'def': 'a holder with sockets for candles', 'name': 'candle_holder'}, {'frequency': 'r', 'id': 199, 'synset': 'candy_bar.n.01', 'synonyms': ['candy_bar'], 'def': 'a candy shaped as a bar', 'name': 'candy_bar'}, {'frequency': 'c', 'id': 200, 'synset': 'candy_cane.n.01', 'synonyms': ['candy_cane'], 'def': 'a hard candy in the shape of a rod (usually with stripes)', 'name': 'candy_cane'}, {'frequency': 'c', 'id': 201, 'synset': 'cane.n.01', 'synonyms': ['walking_cane'], 'def': 'a stick that people can lean on to help them walk', 'name': 'walking_cane'}, {'frequency': 'c', 'id': 202, 'synset': 'canister.n.02', 'synonyms': ['canister', 'cannister'], 'def': 'metal container for storing dry foods such as tea or flour', 'name': 'canister'}, {'frequency': 'r', 'id': 203, 'synset': 'cannon.n.02', 'synonyms': ['cannon'], 
'def': 'heavy gun fired from a tank', 'name': 'cannon'}, {'frequency': 'c', 'id': 204, 'synset': 'canoe.n.01', 'synonyms': ['canoe'], 'def': 'small and light boat; pointed at both ends; propelled with a paddle', 'name': 'canoe'}, {'frequency': 'r', 'id': 205, 'synset': 'cantaloup.n.02', 'synonyms': ['cantaloup', 'cantaloupe'], 'def': 'the fruit of a cantaloup vine; small to medium-sized melon with yellowish flesh', 'name': 'cantaloup'}, {'frequency': 'r', 'id': 206, 'synset': 'canteen.n.01', 'synonyms': ['canteen'], 'def': 'a flask for carrying water; used by soldiers or travelers', 'name': 'canteen'}, {'frequency': 'c', 'id': 207, 'synset': 'cap.n.01', 'synonyms': ['cap_(headwear)'], 'def': 'tight-fitting headwear', 'name': 'cap_(headwear)'}, {'frequency': 'f', 'id': 208, 'synset': 'cap.n.02', 'synonyms': ['bottle_cap', 'cap_(container_lid)'], 'def': 'a top (as for a bottle)', 'name': 'bottle_cap'}, {'frequency': 'r', 'id': 209, 'synset': 'cape.n.02', 'synonyms': ['cape'], 'def': 'a sleeveless garment like a cloak but shorter', 'name': 'cape'}, {'frequency': 'c', 'id': 210, 'synset': 'cappuccino.n.01', 'synonyms': ['cappuccino', 'coffee_cappuccino'], 'def': 'equal parts of espresso and steamed milk', 'name': 'cappuccino'}, {'frequency': 'f', 'id': 211, 'synset': 'car.n.01', 'synonyms': ['car_(automobile)', 'auto_(automobile)', 'automobile'], 'def': 'a motor vehicle with four wheels', 'name': 'car_(automobile)'}, {'frequency': 'f', 'id': 212, 'synset': 'car.n.02', 'synonyms': ['railcar_(part_of_a_train)', 'railway_car_(part_of_a_train)', 'railroad_car_(part_of_a_train)'], 'def': 'a wheeled vehicle adapted to the rails of railroad', 'name': 'railcar_(part_of_a_train)'}, {'frequency': 'r', 'id': 213, 'synset': 'car.n.04', 'synonyms': ['elevator_car'], 'def': 'where passengers ride up and down', 'name': 'elevator_car'}, {'frequency': 'r', 'id': 214, 'synset': 'car_battery.n.01', 'synonyms': ['car_battery', 'automobile_battery'], 'def': 'a battery in a motor vehicle', 'name': 'car_battery'}, {'frequency': 'c', 'id': 215, 'synset': 'card.n.02', 'synonyms': ['identity_card'], 'def': 'a card certifying the identity of the bearer', 'name': 'identity_card'}, {'frequency': 'c', 'id': 216, 'synset': 'card.n.03', 'synonyms': ['card'], 'def': 'a rectangular piece of paper used to send messages (e.g. 
greetings or pictures)', 'name': 'card'}, {'frequency': 'r', 'id': 217, 'synset': 'cardigan.n.01', 'synonyms': ['cardigan'], 'def': 'knitted jacket that is fastened up the front with buttons or a zipper', 'name': 'cardigan'}, {'frequency': 'r', 'id': 218, 'synset': 'cargo_ship.n.01', 'synonyms': ['cargo_ship', 'cargo_vessel'], 'def': 'a ship designed to carry cargo', 'name': 'cargo_ship'}, {'frequency': 'r', 'id': 219, 'synset': 'carnation.n.01', 'synonyms': ['carnation'], 'def': 'plant with pink to purple-red spice-scented usually double flowers', 'name': 'carnation'}, {'frequency': 'c', 'id': 220, 'synset': 'carriage.n.02', 'synonyms': ['horse_carriage'], 'def': 'a vehicle with wheels drawn by one or more horses', 'name': 'horse_carriage'}, {'frequency': 'f', 'id': 221, 'synset': 'carrot.n.01', 'synonyms': ['carrot'], 'def': 'deep orange edible root of the cultivated carrot plant', 'name': 'carrot'}, {'frequency': 'c', 'id': 222, 'synset': 'carryall.n.01', 'synonyms': ['tote_bag'], 'def': 'a capacious bag or basket', 'name': 'tote_bag'}, {'frequency': 'c', 'id': 223, 'synset': 'cart.n.01', 'synonyms': ['cart'], 'def': 'a heavy open wagon usually having two wheels and drawn by an animal', 'name': 'cart'}, {'frequency': 'c', 'id': 224, 'synset': 'carton.n.02', 'synonyms': ['carton'], 'def': 'a box made of cardboard; opens by flaps on top', 'name': 'carton'}, {'frequency': 'c', 'id': 225, 'synset': 'cash_register.n.01', 'synonyms': ['cash_register', 'register_(for_cash_transactions)'], 'def': 'a cashbox with an adding machine to register transactions', 'name': 'cash_register'}, {'frequency': 'r', 'id': 226, 'synset': 'casserole.n.01', 'synonyms': ['casserole'], 'def': 'food cooked and served in a casserole', 'name': 'casserole'}, {'frequency': 'r', 'id': 227, 'synset': 'cassette.n.01', 'synonyms': ['cassette'], 'def': 'a container that holds a magnetic tape used for recording or playing sound or video', 'name': 'cassette'}, {'frequency': 'c', 'id': 228, 'synset': 'cast.n.05', 'synonyms': ['cast', 'plaster_cast', 'plaster_bandage'], 'def': 'bandage consisting of a firm covering that immobilizes broken bones while they heal', 'name': 'cast'}, {'frequency': 'f', 'id': 229, 'synset': 'cat.n.01', 'synonyms': ['cat'], 'def': 'a domestic house cat', 'name': 'cat'}, {'frequency': 'c', 'id': 230, 'synset': 'cauliflower.n.02', 'synonyms': ['cauliflower'], 'def': 'edible compact head of white undeveloped flowers', 'name': 'cauliflower'}, {'frequency': 'r', 'id': 231, 'synset': 'caviar.n.01', 'synonyms': ['caviar', 'caviare'], 'def': "salted roe of sturgeon or other large fish; usually served as an hors d'oeuvre", 'name': 'caviar'}, {'frequency': 'c', 'id': 232, 'synset': 'cayenne.n.02', 'synonyms': ['cayenne_(spice)', 'cayenne_pepper_(spice)', 'red_pepper_(spice)'], 'def': 'ground pods and seeds of pungent red peppers of the genus Capsicum', 'name': 'cayenne_(spice)'}, {'frequency': 'c', 'id': 233, 'synset': 'cd_player.n.01', 'synonyms': ['CD_player'], 'def': 'electronic equipment for playing compact discs (CDs)', 'name': 'CD_player'}, {'frequency': 'c', 'id': 234, 'synset': 'celery.n.01', 'synonyms': ['celery'], 'def': 'widely cultivated herb with aromatic leaf stalks that are eaten raw or cooked', 'name': 'celery'}, {'frequency': 'f', 'id': 235, 'synset': 'cellular_telephone.n.01', 'synonyms': ['cellular_telephone', 'cellular_phone', 'cellphone', 'mobile_phone', 'smart_phone'], 'def': 'a hand-held mobile telephone', 'name': 'cellular_telephone'}, {'frequency': 'r', 'id': 236, 'synset': 
'chain_mail.n.01', 'synonyms': ['chain_mail', 'ring_mail', 'chain_armor', 'chain_armour', 'ring_armor', 'ring_armour'], 'def': '(Middle Ages) flexible armor made of interlinked metal rings', 'name': 'chain_mail'}, {'frequency': 'f', 'id': 237, 'synset': 'chair.n.01', 'synonyms': ['chair'], 'def': 'a seat for one person, with a support for the back', 'name': 'chair'}, {'frequency': 'r', 'id': 238, 'synset': 'chaise_longue.n.01', 'synonyms': ['chaise_longue', 'chaise', 'daybed'], 'def': 'a long chair; for reclining', 'name': 'chaise_longue'}, {'frequency': 'r', 'id': 239, 'synset': 'champagne.n.01', 'synonyms': ['champagne'], 'def': 'a white sparkling wine produced in Champagne or resembling that produced there', 'name': 'champagne'}, {'frequency': 'f', 'id': 240, 'synset': 'chandelier.n.01', 'synonyms': ['chandelier'], 'def': 'branched lighting fixture; often ornate; hangs from the ceiling', 'name': 'chandelier'}, {'frequency': 'r', 'id': 241, 'synset': 'chap.n.04', 'synonyms': ['chap'], 'def': 'leather leggings without a seat; worn over trousers by cowboys to protect their legs', 'name': 'chap'}, {'frequency': 'r', 'id': 242, 'synset': 'checkbook.n.01', 'synonyms': ['checkbook', 'chequebook'], 'def': 'a book issued to holders of checking accounts', 'name': 'checkbook'}, {'frequency': 'r', 'id': 243, 'synset': 'checkerboard.n.01', 'synonyms': ['checkerboard'], 'def': 'a board having 64 squares of two alternating colors', 'name': 'checkerboard'}, {'frequency': 'c', 'id': 244, 'synset': 'cherry.n.03', 'synonyms': ['cherry'], 'def': 'a red fruit with a single hard stone', 'name': 'cherry'}, {'frequency': 'r', 'id': 245, 'synset': 'chessboard.n.01', 'synonyms': ['chessboard'], 'def': 'a checkerboard used to play chess', 'name': 'chessboard'}, {'frequency': 'r', 'id': 246, 'synset': 'chest_of_drawers.n.01', 'synonyms': ['chest_of_drawers_(furniture)', 'bureau_(furniture)', 'chest_(furniture)'], 'def': 'furniture with drawers for keeping clothes', 'name': 'chest_of_drawers_(furniture)'}, {'frequency': 'c', 'id': 247, 'synset': 'chicken.n.02', 'synonyms': ['chicken_(animal)'], 'def': 'a domestic fowl bred for flesh or eggs', 'name': 'chicken_(animal)'}, {'frequency': 'c', 'id': 248, 'synset': 'chicken_wire.n.01', 'synonyms': ['chicken_wire'], 'def': 'a galvanized wire network with a hexagonal mesh; used to build fences', 'name': 'chicken_wire'}, {'frequency': 'r', 'id': 249, 'synset': 'chickpea.n.01', 'synonyms': ['chickpea', 'garbanzo'], 'def': 'the seed of the chickpea plant; usually dried', 'name': 'chickpea'}, {'frequency': 'r', 'id': 250, 'synset': 'chihuahua.n.03', 'synonyms': ['Chihuahua'], 'def': 'an old breed of tiny short-haired dog with protruding eyes from Mexico', 'name': 'Chihuahua'}, {'frequency': 'r', 'id': 251, 'synset': 'chili.n.02', 'synonyms': ['chili_(vegetable)', 'chili_pepper_(vegetable)', 'chilli_(vegetable)', 'chilly_(vegetable)', 'chile_(vegetable)'], 'def': 'very hot and finely tapering pepper of special pungency', 'name': 'chili_(vegetable)'}, {'frequency': 'r', 'id': 252, 'synset': 'chime.n.01', 'synonyms': ['chime', 'gong'], 'def': 'an instrument consisting of a set of bells that are struck with a hammer', 'name': 'chime'}, {'frequency': 'r', 'id': 253, 'synset': 'chinaware.n.01', 'synonyms': ['chinaware'], 'def': 'dishware made of high quality porcelain', 'name': 'chinaware'}, {'frequency': 'c', 'id': 254, 'synset': 'chip.n.04', 'synonyms': ['crisp_(potato_chip)', 'potato_chip'], 'def': 'a thin crisp slice of potato fried in deep fat', 'name': 'crisp_(potato_chip)'}, 
{'frequency': 'r', 'id': 255, 'synset': 'chip.n.06', 'synonyms': ['poker_chip'], 'def': 'a small disk-shaped counter used to represent money when gambling', 'name': 'poker_chip'}, {'frequency': 'c', 'id': 256, 'synset': 'chocolate_bar.n.01', 'synonyms': ['chocolate_bar'], 'def': 'a bar of chocolate candy', 'name': 'chocolate_bar'}, {'frequency': 'c', 'id': 257, 'synset': 'chocolate_cake.n.01', 'synonyms': ['chocolate_cake'], 'def': 'cake containing chocolate', 'name': 'chocolate_cake'}, {'frequency': 'r', 'id': 258, 'synset': 'chocolate_milk.n.01', 'synonyms': ['chocolate_milk'], 'def': 'milk flavored with chocolate syrup', 'name': 'chocolate_milk'}, {'frequency': 'r', 'id': 259, 'synset': 'chocolate_mousse.n.01', 'synonyms': ['chocolate_mousse'], 'def': 'dessert mousse made with chocolate', 'name': 'chocolate_mousse'}, {'frequency': 'f', 'id': 260, 'synset': 'choker.n.03', 'synonyms': ['choker', 'collar', 'neckband'], 'def': 'necklace that fits tightly around the neck', 'name': 'choker'}, {'frequency': 'f', 'id': 261, 'synset': 'chopping_board.n.01', 'synonyms': ['chopping_board', 'cutting_board', 'chopping_block'], 'def': 'a wooden board where meats or vegetables can be cut', 'name': 'chopping_board'}, {'frequency': 'c', 'id': 262, 'synset': 'chopstick.n.01', 'synonyms': ['chopstick'], 'def': 'one of a pair of slender sticks used as oriental tableware to eat food with', 'name': 'chopstick'}, {'frequency': 'f', 'id': 263, 'synset': 'christmas_tree.n.05', 'synonyms': ['Christmas_tree'], 'def': 'an ornamented evergreen used as a Christmas decoration', 'name': 'Christmas_tree'}, {'frequency': 'c', 'id': 264, 'synset': 'chute.n.02', 'synonyms': ['slide'], 'def': 'sloping channel through which things can descend', 'name': 'slide'}, {'frequency': 'r', 'id': 265, 'synset': 'cider.n.01', 'synonyms': ['cider', 'cyder'], 'def': 'a beverage made from juice pressed from apples', 'name': 'cider'}, {'frequency': 'r', 'id': 266, 'synset': 'cigar_box.n.01', 'synonyms': ['cigar_box'], 'def': 'a box for holding cigars', 'name': 'cigar_box'}, {'frequency': 'c', 'id': 267, 'synset': 'cigarette.n.01', 'synonyms': ['cigarette'], 'def': 'finely ground tobacco wrapped in paper; for smoking', 'name': 'cigarette'}, {'frequency': 'c', 'id': 268, 'synset': 'cigarette_case.n.01', 'synonyms': ['cigarette_case', 'cigarette_pack'], 'def': 'a small flat case for holding cigarettes', 'name': 'cigarette_case'}, {'frequency': 'f', 'id': 269, 'synset': 'cistern.n.02', 'synonyms': ['cistern', 'water_tank'], 'def': 'a tank that holds the water used to flush a toilet', 'name': 'cistern'}, {'frequency': 'r', 'id': 270, 'synset': 'clarinet.n.01', 'synonyms': ['clarinet'], 'def': 'a single-reed instrument with a straight tube', 'name': 'clarinet'}, {'frequency': 'r', 'id': 271, 'synset': 'clasp.n.01', 'synonyms': ['clasp'], 'def': 'a fastener (as a buckle or hook) that is used to hold two things together', 'name': 'clasp'}, {'frequency': 'c', 'id': 272, 'synset': 'cleansing_agent.n.01', 'synonyms': ['cleansing_agent', 'cleanser', 'cleaner'], 'def': 'a preparation used in cleaning something', 'name': 'cleansing_agent'}, {'frequency': 'r', 'id': 273, 'synset': 'clementine.n.01', 'synonyms': ['clementine'], 'def': 'a variety of mandarin orange', 'name': 'clementine'}, {'frequency': 'c', 'id': 274, 'synset': 'clip.n.03', 'synonyms': ['clip'], 'def': 'any of various small fasteners used to hold loose articles together', 'name': 'clip'}, {'frequency': 'c', 'id': 275, 'synset': 'clipboard.n.01', 'synonyms': ['clipboard'], 'def': 'a small 
writing board with a clip at the top for holding papers', 'name': 'clipboard'}, {'frequency': 'f', 'id': 276, 'synset': 'clock.n.01', 'synonyms': ['clock', 'timepiece', 'timekeeper'], 'def': 'a timepiece that shows the time of day', 'name': 'clock'}, {'frequency': 'f', 'id': 277, 'synset': 'clock_tower.n.01', 'synonyms': ['clock_tower'], 'def': 'a tower with a large clock visible high up on an outside face', 'name': 'clock_tower'}, {'frequency': 'c', 'id': 278, 'synset': 'clothes_hamper.n.01', 'synonyms': ['clothes_hamper', 'laundry_basket', 'clothes_basket'], 'def': 'a hamper that holds dirty clothes to be washed or wet clothes to be dried', 'name': 'clothes_hamper'}, {'frequency': 'c', 'id': 279, 'synset': 'clothespin.n.01', 'synonyms': ['clothespin', 'clothes_peg'], 'def': 'wood or plastic fastener; for holding clothes on a clothesline', 'name': 'clothespin'}, {'frequency': 'r', 'id': 280, 'synset': 'clutch_bag.n.01', 'synonyms': ['clutch_bag'], 'def': "a woman's strapless purse that is carried in the hand", 'name': 'clutch_bag'}, {'frequency': 'f', 'id': 281, 'synset': 'coaster.n.03', 'synonyms': ['coaster'], 'def': 'a covering (plate or mat) that protects the surface of a table', 'name': 'coaster'}, {'frequency': 'f', 'id': 282, 'synset': 'coat.n.01', 'synonyms': ['coat'], 'def': 'an outer garment that has sleeves and covers the body from shoulder down', 'name': 'coat'}, {'frequency': 'c', 'id': 283, 'synset': 'coat_hanger.n.01', 'synonyms': ['coat_hanger', 'clothes_hanger', 'dress_hanger'], 'def': "a hanger that is shaped like a person's shoulders", 'name': 'coat_hanger'}, {'frequency': 'r', 'id': 284, 'synset': 'coatrack.n.01', 'synonyms': ['coatrack', 'hatrack'], 'def': 'a rack with hooks for temporarily holding coats and hats', 'name': 'coatrack'}, {'frequency': 'c', 'id': 285, 'synset': 'cock.n.04', 'synonyms': ['cock', 'rooster'], 'def': 'adult male chicken', 'name': 'cock'}, {'frequency': 'c', 'id': 286, 'synset': 'coconut.n.02', 'synonyms': ['coconut', 'cocoanut'], 'def': 'large hard-shelled brown oval nut with a fibrous husk', 'name': 'coconut'}, {'frequency': 'r', 'id': 287, 'synset': 'coffee_filter.n.01', 'synonyms': ['coffee_filter'], 'def': 'filter (usually of paper) that passes the coffee and retains the coffee grounds', 'name': 'coffee_filter'}, {'frequency': 'f', 'id': 288, 'synset': 'coffee_maker.n.01', 'synonyms': ['coffee_maker', 'coffee_machine'], 'def': 'a kitchen appliance for brewing coffee automatically', 'name': 'coffee_maker'}, {'frequency': 'f', 'id': 289, 'synset': 'coffee_table.n.01', 'synonyms': ['coffee_table', 'cocktail_table'], 'def': 'low table where magazines can be placed and coffee or cocktails are served', 'name': 'coffee_table'}, {'frequency': 'c', 'id': 290, 'synset': 'coffeepot.n.01', 'synonyms': ['coffeepot'], 'def': 'tall pot in which coffee is brewed', 'name': 'coffeepot'}, {'frequency': 'r', 'id': 291, 'synset': 'coil.n.05', 'synonyms': ['coil'], 'def': 'tubing that is wound in a spiral', 'name': 'coil'}, {'frequency': 'c', 'id': 292, 'synset': 'coin.n.01', 'synonyms': ['coin'], 'def': 'a flat metal piece (usually a disc) used as money', 'name': 'coin'}, {'frequency': 'r', 'id': 293, 'synset': 'colander.n.01', 'synonyms': ['colander', 'cullender'], 'def': 'bowl-shaped strainer; used to wash or drain foods', 'name': 'colander'}, {'frequency': 'c', 'id': 294, 'synset': 'coleslaw.n.01', 'synonyms': ['coleslaw', 'slaw'], 'def': 'basically shredded cabbage', 'name': 'coleslaw'}, {'frequency': 'r', 'id': 295, 'synset': 'coloring_material.n.01', 
'synonyms': ['coloring_material', 'colouring_material'], 'def': 'any material used for its color', 'name': 'coloring_material'}, {'frequency': 'r', 'id': 296, 'synset': 'combination_lock.n.01', 'synonyms': ['combination_lock'], 'def': 'lock that can be opened only by turning dials in a special sequence', 'name': 'combination_lock'}, {'frequency': 'c', 'id': 297, 'synset': 'comforter.n.04', 'synonyms': ['pacifier', 'teething_ring'], 'def': 'device used for an infant to suck or bite on', 'name': 'pacifier'}, {'frequency': 'r', 'id': 298, 'synset': 'comic_book.n.01', 'synonyms': ['comic_book'], 'def': 'a magazine devoted to comic strips', 'name': 'comic_book'}, {'frequency': 'f', 'id': 299, 'synset': 'computer_keyboard.n.01', 'synonyms': ['computer_keyboard', 'keyboard_(computer)'], 'def': 'a keyboard that is a data input device for computers', 'name': 'computer_keyboard'}, {'frequency': 'r', 'id': 300, 'synset': 'concrete_mixer.n.01', 'synonyms': ['concrete_mixer', 'cement_mixer'], 'def': 'a machine with a large revolving drum in which cement/concrete is mixed', 'name': 'concrete_mixer'}, {'frequency': 'f', 'id': 301, 'synset': 'cone.n.01', 'synonyms': ['cone', 'traffic_cone'], 'def': 'a cone-shaped object used to direct traffic', 'name': 'cone'}, {'frequency': 'f', 'id': 302, 'synset': 'control.n.09', 'synonyms': ['control', 'controller'], 'def': 'a mechanism that controls the operation of a machine', 'name': 'control'}, {'frequency': 'r', 'id': 303, 'synset': 'convertible.n.01', 'synonyms': ['convertible_(automobile)'], 'def': 'a car that has a top that can be folded or removed', 'name': 'convertible_(automobile)'}, {'frequency': 'r', 'id': 304, 'synset': 'convertible.n.03', 'synonyms': ['sofa_bed'], 'def': 'a sofa that can be converted into a bed', 'name': 'sofa_bed'}, {'frequency': 'c', 'id': 305, 'synset': 'cookie.n.01', 'synonyms': ['cookie', 'cooky', 'biscuit_(cookie)'], 'def': "any of various small flat sweet cakes (`biscuit' is the British term)", 'name': 'cookie'}, {'frequency': 'r', 'id': 306, 'synset': 'cookie_jar.n.01', 'synonyms': ['cookie_jar', 'cooky_jar'], 'def': 'a jar in which cookies are kept (and sometimes money is hidden)', 'name': 'cookie_jar'}, {'frequency': 'r', 'id': 307, 'synset': 'cooking_utensil.n.01', 'synonyms': ['cooking_utensil'], 'def': 'a kitchen utensil made of material that does not melt easily; used for cooking', 'name': 'cooking_utensil'}, {'frequency': 'f', 'id': 308, 'synset': 'cooler.n.01', 'synonyms': ['cooler_(for_food)', 'ice_chest'], 'def': 'an insulated box for storing food often with ice', 'name': 'cooler_(for_food)'}, {'frequency': 'c', 'id': 309, 'synset': 'cork.n.04', 'synonyms': ['cork_(bottle_plug)', 'bottle_cork'], 'def': 'the plug in the mouth of a bottle (especially a wine bottle)', 'name': 'cork_(bottle_plug)'}, {'frequency': 'r', 'id': 310, 'synset': 'corkboard.n.01', 'synonyms': ['corkboard'], 'def': 'a sheet consisting of cork granules', 'name': 'corkboard'}, {'frequency': 'r', 'id': 311, 'synset': 'corkscrew.n.01', 'synonyms': ['corkscrew', 'bottle_screw'], 'def': 'a bottle opener that pulls corks', 'name': 'corkscrew'}, {'frequency': 'c', 'id': 312, 'synset': 'corn.n.03', 'synonyms': ['edible_corn', 'corn', 'maize'], 'def': 'ears of corn that can be prepared and served for human food', 'name': 'edible_corn'}, {'frequency': 'r', 'id': 313, 'synset': 'cornbread.n.01', 'synonyms': ['cornbread'], 'def': 'bread made primarily of cornmeal', 'name': 'cornbread'}, {'frequency': 'c', 'id': 314, 'synset': 'cornet.n.01', 'synonyms': ['cornet', 
'horn', 'trumpet'], 'def': 'a brass musical instrument with a narrow tube and a flared bell and many valves', 'name': 'cornet'}, {'frequency': 'c', 'id': 315, 'synset': 'cornice.n.01', 'synonyms': ['cornice', 'valance', 'valance_board', 'pelmet'], 'def': 'a decorative framework to conceal curtain fixtures at the top of a window casing', 'name': 'cornice'}, {'frequency': 'r', 'id': 316, 'synset': 'cornmeal.n.01', 'synonyms': ['cornmeal'], 'def': 'coarsely ground corn', 'name': 'cornmeal'}, {'frequency': 'r', 'id': 317, 'synset': 'corset.n.01', 'synonyms': ['corset', 'girdle'], 'def': "a woman's close-fitting foundation garment", 'name': 'corset'}, {'frequency': 'r', 'id': 318, 'synset': 'cos.n.02', 'synonyms': ['romaine_lettuce'], 'def': 'lettuce with long dark-green leaves in a loosely packed elongated head', 'name': 'romaine_lettuce'}, {'frequency': 'c', 'id': 319, 'synset': 'costume.n.04', 'synonyms': ['costume'], 'def': 'the attire characteristic of a country or a time or a social class', 'name': 'costume'}, {'frequency': 'r', 'id': 320, 'synset': 'cougar.n.01', 'synonyms': ['cougar', 'puma', 'catamount', 'mountain_lion', 'panther'], 'def': 'large American feline resembling a lion', 'name': 'cougar'}, {'frequency': 'r', 'id': 321, 'synset': 'coverall.n.01', 'synonyms': ['coverall'], 'def': 'a loose-fitting protective garment that is worn over other clothing', 'name': 'coverall'}, {'frequency': 'r', 'id': 322, 'synset': 'cowbell.n.01', 'synonyms': ['cowbell'], 'def': 'a bell hung around the neck of a cow so that the cow can be easily located', 'name': 'cowbell'}, {'frequency': 'f', 'id': 323, 'synset': 'cowboy_hat.n.01', 'synonyms': ['cowboy_hat', 'ten-gallon_hat'], 'def': 'a hat with a wide brim and a soft crown; worn by American ranch hands', 'name': 'cowboy_hat'}, {'frequency': 'r', 'id': 324, 'synset': 'crab.n.01', 'synonyms': ['crab_(animal)'], 'def': 'decapod having eyes on short stalks and a broad flattened shell and pincers', 'name': 'crab_(animal)'}, {'frequency': 'c', 'id': 325, 'synset': 'cracker.n.01', 'synonyms': ['cracker'], 'def': 'a thin crisp wafer', 'name': 'cracker'}, {'frequency': 'r', 'id': 326, 'synset': 'crape.n.01', 'synonyms': ['crape', 'crepe', 'French_pancake'], 'def': 'small very thin pancake', 'name': 'crape'}, {'frequency': 'f', 'id': 327, 'synset': 'crate.n.01', 'synonyms': ['crate'], 'def': 'a rugged box (usually made of wood); used for shipping', 'name': 'crate'}, {'frequency': 'r', 'id': 328, 'synset': 'crayon.n.01', 'synonyms': ['crayon', 'wax_crayon'], 'def': 'writing or drawing implement made of a colored stick of composition wax', 'name': 'crayon'}, {'frequency': 'r', 'id': 329, 'synset': 'cream_pitcher.n.01', 'synonyms': ['cream_pitcher'], 'def': 'a small pitcher for serving cream', 'name': 'cream_pitcher'}, {'frequency': 'r', 'id': 330, 'synset': 'credit_card.n.01', 'synonyms': ['credit_card', 'charge_card', 'debit_card'], 'def': 'a card, usually plastic, used to pay for goods and services', 'name': 'credit_card'}, {'frequency': 'c', 'id': 331, 'synset': 'crescent_roll.n.01', 'synonyms': ['crescent_roll', 'croissant'], 'def': 'very rich flaky crescent-shaped roll', 'name': 'crescent_roll'}, {'frequency': 'c', 'id': 332, 'synset': 'crib.n.01', 'synonyms': ['crib', 'cot'], 'def': 'baby bed with high sides made of slats', 'name': 'crib'}, {'frequency': 'c', 'id': 333, 'synset': 'crock.n.03', 'synonyms': ['crock_pot', 'earthenware_jar'], 'def': 'an earthen jar (made of baked clay)', 'name': 'crock_pot'}, {'frequency': 'f', 'id': 334, 'synset': 
'crossbar.n.01', 'synonyms': ['crossbar'], 'def': 'a horizontal bar that goes across something', 'name': 'crossbar'}, {'frequency': 'r', 'id': 335, 'synset': 'crouton.n.01', 'synonyms': ['crouton'], 'def': 'a small piece of toasted or fried bread; served in soup or salads', 'name': 'crouton'}, {'frequency': 'r', 'id': 336, 'synset': 'crow.n.01', 'synonyms': ['crow'], 'def': 'black birds having a raucous call', 'name': 'crow'}, {'frequency': 'c', 'id': 337, 'synset': 'crown.n.04', 'synonyms': ['crown'], 'def': 'an ornamental jeweled headdress signifying sovereignty', 'name': 'crown'}, {'frequency': 'c', 'id': 338, 'synset': 'crucifix.n.01', 'synonyms': ['crucifix'], 'def': 'representation of the cross on which Jesus died', 'name': 'crucifix'}, {'frequency': 'c', 'id': 339, 'synset': 'cruise_ship.n.01', 'synonyms': ['cruise_ship', 'cruise_liner'], 'def': 'a passenger ship used commercially for pleasure cruises', 'name': 'cruise_ship'}, {'frequency': 'c', 'id': 340, 'synset': 'cruiser.n.01', 'synonyms': ['police_cruiser', 'patrol_car', 'police_car', 'squad_car'], 'def': 'a car in which policemen cruise the streets', 'name': 'police_cruiser'}, {'frequency': 'c', 'id': 341, 'synset': 'crumb.n.03', 'synonyms': ['crumb'], 'def': 'small piece of e.g. bread or cake', 'name': 'crumb'}, {'frequency': 'r', 'id': 342, 'synset': 'crutch.n.01', 'synonyms': ['crutch'], 'def': 'a wooden or metal staff that fits under the armpit and reaches to the ground', 'name': 'crutch'}, {'frequency': 'c', 'id': 343, 'synset': 'cub.n.03', 'synonyms': ['cub_(animal)'], 'def': 'the young of certain carnivorous mammals such as the bear or wolf or lion', 'name': 'cub_(animal)'}, {'frequency': 'r', 'id': 344, 'synset': 'cube.n.05', 'synonyms': ['cube', 'square_block'], 'def': 'a block in the (approximate) shape of a cube', 'name': 'cube'}, {'frequency': 'f', 'id': 345, 'synset': 'cucumber.n.02', 'synonyms': ['cucumber', 'cuke'], 'def': 'cylindrical green fruit with thin green rind and white flesh eaten as a vegetable', 'name': 'cucumber'}, {'frequency': 'c', 'id': 346, 'synset': 'cufflink.n.01', 'synonyms': ['cufflink'], 'def': 'jewelry consisting of linked buttons used to fasten the cuffs of a shirt', 'name': 'cufflink'}, {'frequency': 'f', 'id': 347, 'synset': 'cup.n.01', 'synonyms': ['cup'], 'def': 'a small open container usually used for drinking; usually has a handle', 'name': 'cup'}, {'frequency': 'c', 'id': 348, 'synset': 'cup.n.08', 'synonyms': ['trophy_cup'], 'def': 'a metal vessel with handles that is awarded as a trophy to a competition winner', 'name': 'trophy_cup'}, {'frequency': 'c', 'id': 349, 'synset': 'cupcake.n.01', 'synonyms': ['cupcake'], 'def': 'small cake baked in a muffin tin', 'name': 'cupcake'}, {'frequency': 'r', 'id': 350, 'synset': 'curler.n.01', 'synonyms': ['hair_curler', 'hair_roller', 'hair_crimper'], 'def': 'a cylindrical tube around which the hair is wound to curl it', 'name': 'hair_curler'}, {'frequency': 'r', 'id': 351, 'synset': 'curling_iron.n.01', 'synonyms': ['curling_iron'], 'def': 'a cylindrical home appliance that heats hair that has been curled around it', 'name': 'curling_iron'}, {'frequency': 'f', 'id': 352, 'synset': 'curtain.n.01', 'synonyms': ['curtain', 'drapery'], 'def': 'hanging cloth used as a blind (especially for a window)', 'name': 'curtain'}, {'frequency': 'f', 'id': 353, 'synset': 'cushion.n.03', 'synonyms': ['cushion'], 'def': 'a soft bag filled with air or padding such as feathers or foam rubber', 'name': 'cushion'}, {'frequency': 'r', 'id': 354, 'synset': 
'custard.n.01', 'synonyms': ['custard'], 'def': 'sweetened mixture of milk and eggs baked or boiled or frozen', 'name': 'custard'}, {'frequency': 'c', 'id': 355, 'synset': 'cutter.n.06', 'synonyms': ['cutting_tool'], 'def': 'a cutting implement; a tool for cutting', 'name': 'cutting_tool'}, {'frequency': 'r', 'id': 356, 'synset': 'cylinder.n.04', 'synonyms': ['cylinder'], 'def': 'a cylindrical container', 'name': 'cylinder'}, {'frequency': 'r', 'id': 357, 'synset': 'cymbal.n.01', 'synonyms': ['cymbal'], 'def': 'a percussion instrument consisting of a concave brass disk', 'name': 'cymbal'}, {'frequency': 'r', 'id': 358, 'synset': 'dachshund.n.01', 'synonyms': ['dachshund', 'dachsie', 'badger_dog'], 'def': 'small long-bodied short-legged breed of dog having a short sleek coat and long drooping ears', 'name': 'dachshund'}, {'frequency': 'r', 'id': 359, 'synset': 'dagger.n.01', 'synonyms': ['dagger'], 'def': 'a short knife with a pointed blade used for piercing or stabbing', 'name': 'dagger'}, {'frequency': 'r', 'id': 360, 'synset': 'dartboard.n.01', 'synonyms': ['dartboard'], 'def': 'a circular board of wood or cork used as the target in the game of darts', 'name': 'dartboard'}, {'frequency': 'r', 'id': 361, 'synset': 'date.n.08', 'synonyms': ['date_(fruit)'], 'def': 'sweet edible fruit of the date palm with a single long woody seed', 'name': 'date_(fruit)'}, {'frequency': 'f', 'id': 362, 'synset': 'deck_chair.n.01', 'synonyms': ['deck_chair', 'beach_chair'], 'def': 'a folding chair for use outdoors; a wooden frame supports a length of canvas', 'name': 'deck_chair'}, {'frequency': 'c', 'id': 363, 'synset': 'deer.n.01', 'synonyms': ['deer', 'cervid'], 'def': "distinguished from Bovidae by the male's having solid deciduous antlers", 'name': 'deer'}, {'frequency': 'c', 'id': 364, 'synset': 'dental_floss.n.01', 'synonyms': ['dental_floss', 'floss'], 'def': 'a soft thread for cleaning the spaces between the teeth', 'name': 'dental_floss'}, {'frequency': 'f', 'id': 365, 'synset': 'desk.n.01', 'synonyms': ['desk'], 'def': 'a piece of furniture with a writing surface and usually drawers or other compartments', 'name': 'desk'}, {'frequency': 'r', 'id': 366, 'synset': 'detergent.n.01', 'synonyms': ['detergent'], 'def': 'a surface-active chemical widely used in industry and laundering', 'name': 'detergent'}, {'frequency': 'c', 'id': 367, 'synset': 'diaper.n.01', 'synonyms': ['diaper'], 'def': 'garment consisting of a folded cloth drawn up between the legs and fastened at the waist', 'name': 'diaper'}, {'frequency': 'r', 'id': 368, 'synset': 'diary.n.01', 'synonyms': ['diary', 'journal'], 'def': 'a daily written record of (usually personal) experiences and observations', 'name': 'diary'}, {'frequency': 'r', 'id': 369, 'synset': 'die.n.01', 'synonyms': ['die', 'dice'], 'def': 'a small cube with 1 to 6 spots on the six faces; used in gambling', 'name': 'die'}, {'frequency': 'r', 'id': 370, 'synset': 'dinghy.n.01', 'synonyms': ['dinghy', 'dory', 'rowboat'], 'def': 'a small boat of shallow draft with seats and oars with which it is propelled', 'name': 'dinghy'}, {'frequency': 'f', 'id': 371, 'synset': 'dining_table.n.01', 'synonyms': ['dining_table'], 'def': 'a table at which meals are served', 'name': 'dining_table'}, {'frequency': 'r', 'id': 372, 'synset': 'dinner_jacket.n.01', 'synonyms': ['tux', 'tuxedo'], 'def': 'semiformal evening dress for men', 'name': 'tux'}, {'frequency': 'c', 'id': 373, 'synset': 'dish.n.01', 'synonyms': ['dish'], 'def': 'a piece of dishware normally used as a container for 
holding or serving food', 'name': 'dish'}, {'frequency': 'c', 'id': 374, 'synset': 'dish.n.05', 'synonyms': ['dish_antenna'], 'def': 'directional antenna consisting of a parabolic reflector', 'name': 'dish_antenna'}, {'frequency': 'c', 'id': 375, 'synset': 'dishrag.n.01', 'synonyms': ['dishrag', 'dishcloth'], 'def': 'a cloth for washing dishes', 'name': 'dishrag'}, {'frequency': 'c', 'id': 376, 'synset': 'dishtowel.n.01', 'synonyms': ['dishtowel', 'tea_towel'], 'def': 'a towel for drying dishes', 'name': 'dishtowel'}, {'frequency': 'f', 'id': 377, 'synset': 'dishwasher.n.01', 'synonyms': ['dishwasher', 'dishwashing_machine'], 'def': 'a machine for washing dishes', 'name': 'dishwasher'}, {'frequency': 'r', 'id': 378, 'synset': 'dishwasher_detergent.n.01', 'synonyms': ['dishwasher_detergent', 'dishwashing_detergent', 'dishwashing_liquid'], 'def': 'a low-sudsing detergent designed for use in dishwashers', 'name': 'dishwasher_detergent'}, {'frequency': 'r', 'id': 379, 'synset': 'diskette.n.01', 'synonyms': ['diskette', 'floppy', 'floppy_disk'], 'def': 'a small plastic magnetic disk enclosed in a stiff envelope used to store data', 'name': 'diskette'}, {'frequency': 'c', 'id': 380, 'synset': 'dispenser.n.01', 'synonyms': ['dispenser'], 'def': 'a container so designed that the contents can be used in prescribed amounts', 'name': 'dispenser'}, {'frequency': 'c', 'id': 381, 'synset': 'dixie_cup.n.01', 'synonyms': ['Dixie_cup', 'paper_cup'], 'def': 'a disposable cup made of paper; for holding drinks', 'name': 'Dixie_cup'}, {'frequency': 'f', 'id': 382, 'synset': 'dog.n.01', 'synonyms': ['dog'], 'def': 'a common domesticated dog', 'name': 'dog'}, {'frequency': 'f', 'id': 383, 'synset': 'dog_collar.n.01', 'synonyms': ['dog_collar'], 'def': 'a collar for a dog', 'name': 'dog_collar'}, {'frequency': 'c', 'id': 384, 'synset': 'doll.n.01', 'synonyms': ['doll'], 'def': 'a toy replica of a HUMAN (NOT AN ANIMAL)', 'name': 'doll'}, {'frequency': 'r', 'id': 385, 'synset': 'dollar.n.02', 'synonyms': ['dollar', 'dollar_bill', 'one_dollar_bill'], 'def': 'a piece of paper money worth one dollar', 'name': 'dollar'}, {'frequency': 'r', 'id': 386, 'synset': 'dolphin.n.02', 'synonyms': ['dolphin'], 'def': 'any of various small toothed whales with a beaklike snout; larger than porpoises', 'name': 'dolphin'}, {'frequency': 'c', 'id': 387, 'synset': 'domestic_ass.n.01', 'synonyms': ['domestic_ass', 'donkey'], 'def': 'domestic beast of burden descended from the African wild ass; patient but stubborn', 'name': 'domestic_ass'}, {'frequency': 'r', 'id': 388, 'synset': 'domino.n.03', 'synonyms': ['eye_mask'], 'def': 'a mask covering the upper part of the face but with holes for the eyes', 'name': 'eye_mask'}, {'frequency': 'r', 'id': 389, 'synset': 'doorbell.n.01', 'synonyms': ['doorbell', 'buzzer'], 'def': 'a button at an outer door that gives a ringing or buzzing signal when pushed', 'name': 'doorbell'}, {'frequency': 'f', 'id': 390, 'synset': 'doorknob.n.01', 'synonyms': ['doorknob', 'doorhandle'], 'def': "a knob used to open a door (often called `doorhandle' in Great Britain)", 'name': 'doorknob'}, {'frequency': 'c', 'id': 391, 'synset': 'doormat.n.02', 'synonyms': ['doormat', 'welcome_mat'], 'def': 'a mat placed outside an exterior door for wiping the shoes before entering', 'name': 'doormat'}, {'frequency': 'f', 'id': 392, 'synset': 'doughnut.n.02', 'synonyms': ['doughnut', 'donut'], 'def': 'a small ring-shaped friedcake', 'name': 'doughnut'}, {'frequency': 'r', 'id': 393, 'synset': 'dove.n.01', 'synonyms': ['dove'], 
'def': 'any of numerous small pigeons', 'name': 'dove'}, {'frequency': 'r', 'id': 394, 'synset': 'dragonfly.n.01', 'synonyms': ['dragonfly'], 'def': 'slender-bodied non-stinging insect having iridescent wings that are outspread at rest', 'name': 'dragonfly'}, {'frequency': 'f', 'id': 395, 'synset': 'drawer.n.01', 'synonyms': ['drawer'], 'def': 'a boxlike container in a piece of furniture; made so as to slide in and out', 'name': 'drawer'}, {'frequency': 'c', 'id': 396, 'synset': 'drawers.n.01', 'synonyms': ['underdrawers', 'boxers', 'boxershorts'], 'def': 'underpants worn by men', 'name': 'underdrawers'}, {'frequency': 'f', 'id': 397, 'synset': 'dress.n.01', 'synonyms': ['dress', 'frock'], 'def': 'a one-piece garment for a woman; has skirt and bodice', 'name': 'dress'}, {'frequency': 'c', 'id': 398, 'synset': 'dress_hat.n.01', 'synonyms': ['dress_hat', 'high_hat', 'opera_hat', 'silk_hat', 'top_hat'], 'def': "a man's hat with a tall crown; usually covered with silk or with beaver fur", 'name': 'dress_hat'}, {'frequency': 'c', 'id': 399, 'synset': 'dress_suit.n.01', 'synonyms': ['dress_suit'], 'def': 'formalwear consisting of full evening dress for men', 'name': 'dress_suit'}, {'frequency': 'c', 'id': 400, 'synset': 'dresser.n.05', 'synonyms': ['dresser'], 'def': 'a cabinet with shelves', 'name': 'dresser'}, {'frequency': 'c', 'id': 401, 'synset': 'drill.n.01', 'synonyms': ['drill'], 'def': 'a tool with a sharp rotating point for making holes in hard materials', 'name': 'drill'}, {'frequency': 'r', 'id': 402, 'synset': 'drinking_fountain.n.01', 'synonyms': ['drinking_fountain'], 'def': 'a public fountain to provide a jet of drinking water', 'name': 'drinking_fountain'}, {'frequency': 'r', 'id': 403, 'synset': 'drone.n.04', 'synonyms': ['drone'], 'def': 'an aircraft without a pilot that is operated by remote control', 'name': 'drone'}, {'frequency': 'r', 'id': 404, 'synset': 'dropper.n.01', 'synonyms': ['dropper', 'eye_dropper'], 'def': 'pipet consisting of a small tube with a vacuum bulb at one end for drawing liquid in and releasing it a drop at a time', 'name': 'dropper'}, {'frequency': 'c', 'id': 405, 'synset': 'drum.n.01', 'synonyms': ['drum_(musical_instrument)'], 'def': 'a musical percussion instrument; usually consists of a hollow cylinder with a membrane stretched across each end', 'name': 'drum_(musical_instrument)'}, {'frequency': 'r', 'id': 406, 'synset': 'drumstick.n.02', 'synonyms': ['drumstick'], 'def': 'a stick used for playing a drum', 'name': 'drumstick'}, {'frequency': 'f', 'id': 407, 'synset': 'duck.n.01', 'synonyms': ['duck'], 'def': 'small web-footed broad-billed swimming bird', 'name': 'duck'}, {'frequency': 'r', 'id': 408, 'synset': 'duckling.n.02', 'synonyms': ['duckling'], 'def': 'young duck', 'name': 'duckling'}, {'frequency': 'c', 'id': 409, 'synset': 'duct_tape.n.01', 'synonyms': ['duct_tape'], 'def': 'a wide silvery adhesive tape', 'name': 'duct_tape'}, {'frequency': 'f', 'id': 410, 'synset': 'duffel_bag.n.01', 'synonyms': ['duffel_bag', 'duffle_bag', 'duffel', 'duffle'], 'def': 'a large cylindrical bag of heavy cloth', 'name': 'duffel_bag'}, {'frequency': 'r', 'id': 411, 'synset': 'dumbbell.n.01', 'synonyms': ['dumbbell'], 'def': 'an exercising weight with two ball-like ends connected by a short handle', 'name': 'dumbbell'}, {'frequency': 'c', 'id': 412, 'synset': 'dumpster.n.01', 'synonyms': ['dumpster'], 'def': 'a container designed to receive and transport and dump waste', 'name': 'dumpster'}, {'frequency': 'r', 'id': 413, 'synset': 'dustpan.n.02', 
'synonyms': ['dustpan'], 'def': 'a short-handled receptacle into which dust can be swept', 'name': 'dustpan'}, {'frequency': 'r', 'id': 414, 'synset': 'dutch_oven.n.02', 'synonyms': ['Dutch_oven'], 'def': 'iron or earthenware cooking pot; used for stews', 'name': 'Dutch_oven'}, {'frequency': 'c', 'id': 415, 'synset': 'eagle.n.01', 'synonyms': ['eagle'], 'def': 'large birds of prey noted for their broad wings and strong soaring flight', 'name': 'eagle'}, {'frequency': 'f', 'id': 416, 'synset': 'earphone.n.01', 'synonyms': ['earphone', 'earpiece', 'headphone'], 'def': 'device for listening to audio that is held over or inserted into the ear', 'name': 'earphone'}, {'frequency': 'r', 'id': 417, 'synset': 'earplug.n.01', 'synonyms': ['earplug'], 'def': 'a soft plug that is inserted into the ear canal to block sound', 'name': 'earplug'}, {'frequency': 'f', 'id': 418, 'synset': 'earring.n.01', 'synonyms': ['earring'], 'def': 'jewelry to ornament the ear', 'name': 'earring'}, {'frequency': 'c', 'id': 419, 'synset': 'easel.n.01', 'synonyms': ['easel'], 'def': "an upright tripod for displaying something (usually an artist's canvas)", 'name': 'easel'}, {'frequency': 'r', 'id': 420, 'synset': 'eclair.n.01', 'synonyms': ['eclair'], 'def': 'oblong cream puff', 'name': 'eclair'}, {'frequency': 'r', 'id': 421, 'synset': 'eel.n.01', 'synonyms': ['eel'], 'def': 'an elongate fish with fatty flesh', 'name': 'eel'}, {'frequency': 'f', 'id': 422, 'synset': 'egg.n.02', 'synonyms': ['egg', 'eggs'], 'def': 'oval reproductive body of a fowl (especially a hen) used as food', 'name': 'egg'}, {'frequency': 'r', 'id': 423, 'synset': 'egg_roll.n.01', 'synonyms': ['egg_roll', 'spring_roll'], 'def': 'minced vegetables and meat wrapped in a pancake and fried', 'name': 'egg_roll'}, {'frequency': 'c', 'id': 424, 'synset': 'egg_yolk.n.01', 'synonyms': ['egg_yolk', 'yolk_(egg)'], 'def': 'the yellow spherical part of an egg', 'name': 'egg_yolk'}, {'frequency': 'c', 'id': 425, 'synset': 'eggbeater.n.02', 'synonyms': ['eggbeater', 'eggwhisk'], 'def': 'a mixer for beating eggs or whipping cream', 'name': 'eggbeater'}, {'frequency': 'c', 'id': 426, 'synset': 'eggplant.n.01', 'synonyms': ['eggplant', 'aubergine'], 'def': 'egg-shaped vegetable having a shiny skin typically dark purple', 'name': 'eggplant'}, {'frequency': 'r', 'id': 427, 'synset': 'electric_chair.n.01', 'synonyms': ['electric_chair'], 'def': 'a chair-shaped instrument of execution by electrocution', 'name': 'electric_chair'}, {'frequency': 'f', 'id': 428, 'synset': 'electric_refrigerator.n.01', 'synonyms': ['refrigerator'], 'def': 'a refrigerator in which the coolant is pumped around by an electric motor', 'name': 'refrigerator'}, {'frequency': 'f', 'id': 429, 'synset': 'elephant.n.01', 'synonyms': ['elephant'], 'def': 'a common elephant', 'name': 'elephant'}, {'frequency': 'r', 'id': 430, 'synset': 'elk.n.01', 'synonyms': ['elk', 'moose'], 'def': 'large northern deer with enormous flattened antlers in the male', 'name': 'elk'}, {'frequency': 'c', 'id': 431, 'synset': 'envelope.n.01', 'synonyms': ['envelope'], 'def': 'a flat (usually rectangular) container for a letter, thin package, etc.', 'name': 'envelope'}, {'frequency': 'c', 'id': 432, 'synset': 'eraser.n.01', 'synonyms': ['eraser'], 'def': 'an implement used to erase something', 'name': 'eraser'}, {'frequency': 'r', 'id': 433, 'synset': 'escargot.n.01', 'synonyms': ['escargot'], 'def': 'edible snail usually served in the shell with a sauce of melted butter and garlic', 'name': 'escargot'}, {'frequency': 'r', 
'id': 434, 'synset': 'eyepatch.n.01', 'synonyms': ['eyepatch'], 'def': 'a protective cloth covering for an injured eye', 'name': 'eyepatch'}, {'frequency': 'r', 'id': 435, 'synset': 'falcon.n.01', 'synonyms': ['falcon'], 'def': 'birds of prey having long pointed powerful wings adapted for swift flight', 'name': 'falcon'}, {'frequency': 'f', 'id': 436, 'synset': 'fan.n.01', 'synonyms': ['fan'], 'def': 'a device for creating a current of air by movement of a surface or surfaces', 'name': 'fan'}, {'frequency': 'f', 'id': 437, 'synset': 'faucet.n.01', 'synonyms': ['faucet', 'spigot', 'tap'], 'def': 'a regulator for controlling the flow of a liquid from a reservoir', 'name': 'faucet'}, {'frequency': 'r', 'id': 438, 'synset': 'fedora.n.01', 'synonyms': ['fedora'], 'def': 'a hat made of felt with a creased crown', 'name': 'fedora'}, {'frequency': 'r', 'id': 439, 'synset': 'ferret.n.02', 'synonyms': ['ferret'], 'def': 'domesticated albino variety of the European polecat bred for hunting rats and rabbits', 'name': 'ferret'}, {'frequency': 'c', 'id': 440, 'synset': 'ferris_wheel.n.01', 'synonyms': ['Ferris_wheel'], 'def': 'a large wheel with suspended seats that remain upright as the wheel rotates', 'name': 'Ferris_wheel'}, {'frequency': 'r', 'id': 441, 'synset': 'ferry.n.01', 'synonyms': ['ferry', 'ferryboat'], 'def': 'a boat that transports people or vehicles across a body of water and operates on a regular schedule', 'name': 'ferry'}, {'frequency': 'r', 'id': 442, 'synset': 'fig.n.04', 'synonyms': ['fig_(fruit)'], 'def': 'fleshy sweet pear-shaped yellowish or purple fruit eaten fresh or preserved or dried', 'name': 'fig_(fruit)'}, {'frequency': 'c', 'id': 443, 'synset': 'fighter.n.02', 'synonyms': ['fighter_jet', 'fighter_aircraft', 'attack_aircraft'], 'def': 'a high-speed military or naval airplane designed to destroy enemy targets', 'name': 'fighter_jet'}, {'frequency': 'f', 'id': 444, 'synset': 'figurine.n.01', 'synonyms': ['figurine'], 'def': 'a small carved or molded figure', 'name': 'figurine'}, {'frequency': 'c', 'id': 445, 'synset': 'file.n.03', 'synonyms': ['file_cabinet', 'filing_cabinet'], 'def': 'office furniture consisting of a container for keeping papers in order', 'name': 'file_cabinet'}, {'frequency': 'r', 'id': 446, 'synset': 'file.n.04', 'synonyms': ['file_(tool)'], 'def': 'a steel hand tool with small sharp teeth on some or all of its surfaces; used for smoothing wood or metal', 'name': 'file_(tool)'}, {'frequency': 'f', 'id': 447, 'synset': 'fire_alarm.n.02', 'synonyms': ['fire_alarm', 'smoke_alarm'], 'def': 'an alarm that is tripped off by fire or smoke', 'name': 'fire_alarm'}, {'frequency': 'c', 'id': 448, 'synset': 'fire_engine.n.01', 'synonyms': ['fire_engine', 'fire_truck'], 'def': 'large trucks that carry firefighters and equipment to the site of a fire', 'name': 'fire_engine'}, {'frequency': 'c', 'id': 449, 'synset': 'fire_extinguisher.n.01', 'synonyms': ['fire_extinguisher', 'extinguisher'], 'def': 'a manually operated device for extinguishing small fires', 'name': 'fire_extinguisher'}, {'frequency': 'c', 'id': 450, 'synset': 'fire_hose.n.01', 'synonyms': ['fire_hose'], 'def': 'a large hose that carries water from a fire hydrant to the site of the fire', 'name': 'fire_hose'}, {'frequency': 'f', 'id': 451, 'synset': 'fireplace.n.01', 'synonyms': ['fireplace'], 'def': 'an open recess in a wall at the base of a chimney where a fire can be built', 'name': 'fireplace'}, {'frequency': 'f', 'id': 452, 'synset': 'fireplug.n.01', 'synonyms': ['fireplug', 'fire_hydrant', 
'hydrant'], 'def': 'an upright hydrant for drawing water to use in fighting a fire', 'name': 'fireplug'}, {'frequency': 'c', 'id': 453, 'synset': 'fish.n.01', 'synonyms': ['fish'], 'def': 'any of various mostly cold-blooded aquatic vertebrates usually having scales and breathing through gills', 'name': 'fish'}, {'frequency': 'r', 'id': 454, 'synset': 'fish.n.02', 'synonyms': ['fish_(food)'], 'def': 'the flesh of fish used as food', 'name': 'fish_(food)'}, {'frequency': 'r', 'id': 455, 'synset': 'fishbowl.n.02', 'synonyms': ['fishbowl', 'goldfish_bowl'], 'def': 'a transparent bowl in which small fish are kept', 'name': 'fishbowl'}, {'frequency': 'r', 'id': 456, 'synset': 'fishing_boat.n.01', 'synonyms': ['fishing_boat', 'fishing_vessel'], 'def': 'a vessel for fishing', 'name': 'fishing_boat'}, {'frequency': 'c', 'id': 457, 'synset': 'fishing_rod.n.01', 'synonyms': ['fishing_rod', 'fishing_pole'], 'def': 'a rod that is used in fishing to extend the fishing line', 'name': 'fishing_rod'}, {'frequency': 'f', 'id': 458, 'synset': 'flag.n.01', 'synonyms': ['flag'], 'def': 'emblem usually consisting of a rectangular piece of cloth of distinctive design (do not include pole)', 'name': 'flag'}, {'frequency': 'f', 'id': 459, 'synset': 'flagpole.n.02', 'synonyms': ['flagpole', 'flagstaff'], 'def': 'a tall staff or pole on which a flag is raised', 'name': 'flagpole'}, {'frequency': 'c', 'id': 460, 'synset': 'flamingo.n.01', 'synonyms': ['flamingo'], 'def': 'large pink web-footed bird with down-bent bill', 'name': 'flamingo'}, {'frequency': 'c', 'id': 461, 'synset': 'flannel.n.01', 'synonyms': ['flannel'], 'def': 'a soft light woolen fabric; used for clothing', 'name': 'flannel'}, {'frequency': 'r', 'id': 462, 'synset': 'flash.n.10', 'synonyms': ['flash', 'flashbulb'], 'def': 'a lamp for providing momentary light to take a photograph', 'name': 'flash'}, {'frequency': 'c', 'id': 463, 'synset': 'flashlight.n.01', 'synonyms': ['flashlight', 'torch'], 'def': 'a small portable battery-powered electric lamp', 'name': 'flashlight'}, {'frequency': 'r', 'id': 464, 'synset': 'fleece.n.03', 'synonyms': ['fleece'], 'def': 'a soft bulky fabric with deep pile; used chiefly for clothing', 'name': 'fleece'}, {'frequency': 'f', 'id': 465, 'synset': 'flip-flop.n.02', 'synonyms': ['flip-flop_(sandal)'], 'def': 'a backless sandal held to the foot by a thong between two toes', 'name': 'flip-flop_(sandal)'}, {'frequency': 'c', 'id': 466, 'synset': 'flipper.n.01', 'synonyms': ['flipper_(footwear)', 'fin_(footwear)'], 'def': 'a shoe to aid a person in swimming', 'name': 'flipper_(footwear)'}, {'frequency': 'f', 'id': 467, 'synset': 'flower_arrangement.n.01', 'synonyms': ['flower_arrangement', 'floral_arrangement'], 'def': 'a decorative arrangement of flowers', 'name': 'flower_arrangement'}, {'frequency': 'c', 'id': 468, 'synset': 'flute.n.02', 'synonyms': ['flute_glass', 'champagne_flute'], 'def': 'a tall narrow wineglass', 'name': 'flute_glass'}, {'frequency': 'r', 'id': 469, 'synset': 'foal.n.01', 'synonyms': ['foal'], 'def': 'a young horse', 'name': 'foal'}, {'frequency': 'c', 'id': 470, 'synset': 'folding_chair.n.01', 'synonyms': ['folding_chair'], 'def': 'a chair that can be folded flat for storage', 'name': 'folding_chair'}, {'frequency': 'c', 'id': 471, 'synset': 'food_processor.n.01', 'synonyms': ['food_processor'], 'def': 'a kitchen appliance for shredding, blending, chopping, or slicing food', 'name': 'food_processor'}, {'frequency': 'c', 'id': 472, 'synset': 'football.n.02', 'synonyms': ['football_(American)'], 
'def': 'the inflated oblong ball used in playing American football', 'name': 'football_(American)'}, {'frequency': 'r', 'id': 473, 'synset': 'football_helmet.n.01', 'synonyms': ['football_helmet'], 'def': 'a padded helmet with a face mask to protect the head of football players', 'name': 'football_helmet'}, {'frequency': 'c', 'id': 474, 'synset': 'footstool.n.01', 'synonyms': ['footstool', 'footrest'], 'def': 'a low seat or a stool to rest the feet of a seated person', 'name': 'footstool'}, {'frequency': 'f', 'id': 475, 'synset': 'fork.n.01', 'synonyms': ['fork'], 'def': 'cutlery used for serving and eating food', 'name': 'fork'}, {'frequency': 'r', 'id': 476, 'synset': 'forklift.n.01', 'synonyms': ['forklift'], 'def': 'an industrial vehicle with a power operated fork in front that can be inserted under loads to lift and move them', 'name': 'forklift'}, {'frequency': 'r', 'id': 477, 'synset': 'freight_car.n.01', 'synonyms': ['freight_car'], 'def': 'a railway car that carries freight', 'name': 'freight_car'}, {'frequency': 'r', 'id': 478, 'synset': 'french_toast.n.01', 'synonyms': ['French_toast'], 'def': 'bread slice dipped in egg and milk and fried', 'name': 'French_toast'}, {'frequency': 'c', 'id': 479, 'synset': 'freshener.n.01', 'synonyms': ['freshener', 'air_freshener'], 'def': 'anything that freshens', 'name': 'freshener'}, {'frequency': 'f', 'id': 480, 'synset': 'frisbee.n.01', 'synonyms': ['frisbee'], 'def': 'a light, plastic disk propelled with a flip of the wrist for recreation or competition', 'name': 'frisbee'}, {'frequency': 'c', 'id': 481, 'synset': 'frog.n.01', 'synonyms': ['frog', 'toad', 'toad_frog'], 'def': 'tailless stout-bodied amphibians with long hind limbs for leaping', 'name': 'frog'}, {'frequency': 'c', 'id': 482, 'synset': 'fruit_juice.n.01', 'synonyms': ['fruit_juice'], 'def': 'drink produced by squeezing or crushing fruit', 'name': 'fruit_juice'}, {'frequency': 'r', 'id': 483, 'synset': 'fruit_salad.n.01', 'synonyms': ['fruit_salad'], 'def': 'salad composed of fruits', 'name': 'fruit_salad'}, {'frequency': 'c', 'id': 484, 'synset': 'frying_pan.n.01', 'synonyms': ['frying_pan', 'frypan', 'skillet'], 'def': 'a pan used for frying foods', 'name': 'frying_pan'}, {'frequency': 'r', 'id': 485, 'synset': 'fudge.n.01', 'synonyms': ['fudge'], 'def': 'soft creamy candy', 'name': 'fudge'}, {'frequency': 'r', 'id': 486, 'synset': 'funnel.n.02', 'synonyms': ['funnel'], 'def': 'a cone-shaped utensil used to channel a substance into a container with a small mouth', 'name': 'funnel'}, {'frequency': 'c', 'id': 487, 'synset': 'futon.n.01', 'synonyms': ['futon'], 'def': 'a pad that is used for sleeping on the floor or on a raised frame', 'name': 'futon'}, {'frequency': 'r', 'id': 488, 'synset': 'gag.n.02', 'synonyms': ['gag', 'muzzle'], 'def': "restraint put into a person's mouth to prevent speaking or shouting", 'name': 'gag'}, {'frequency': 'r', 'id': 489, 'synset': 'garbage.n.03', 'synonyms': ['garbage'], 'def': 'a receptacle where waste can be discarded', 'name': 'garbage'}, {'frequency': 'c', 'id': 490, 'synset': 'garbage_truck.n.01', 'synonyms': ['garbage_truck'], 'def': 'a truck for collecting domestic refuse', 'name': 'garbage_truck'}, {'frequency': 'c', 'id': 491, 'synset': 'garden_hose.n.01', 'synonyms': ['garden_hose'], 'def': 'a hose used for watering a lawn or garden', 'name': 'garden_hose'}, {'frequency': 'c', 'id': 492, 'synset': 'gargle.n.01', 'synonyms': ['gargle', 'mouthwash'], 'def': 'a medicated solution used for gargling and rinsing the mouth', 'name': 
'gargle'}, {'frequency': 'r', 'id': 493, 'synset': 'gargoyle.n.02', 'synonyms': ['gargoyle'], 'def': 'an ornament consisting of a grotesquely carved figure of a person or animal', 'name': 'gargoyle'}, {'frequency': 'c', 'id': 494, 'synset': 'garlic.n.02', 'synonyms': ['garlic', 'ail'], 'def': 'aromatic bulb used as seasoning', 'name': 'garlic'}, {'frequency': 'r', 'id': 495, 'synset': 'gasmask.n.01', 'synonyms': ['gasmask', 'respirator', 'gas_helmet'], 'def': 'a protective face mask with a filter', 'name': 'gasmask'}, {'frequency': 'r', 'id': 496, 'synset': 'gazelle.n.01', 'synonyms': ['gazelle'], 'def': 'small swift graceful antelope of Africa and Asia having lustrous eyes', 'name': 'gazelle'}, {'frequency': 'c', 'id': 497, 'synset': 'gelatin.n.02', 'synonyms': ['gelatin', 'jelly'], 'def': 'an edible jelly made with gelatin and used as a dessert or salad base or a coating for foods', 'name': 'gelatin'}, {'frequency': 'r', 'id': 498, 'synset': 'gem.n.02', 'synonyms': ['gemstone'], 'def': 'a crystalline rock that can be cut and polished for jewelry', 'name': 'gemstone'}, {'frequency': 'c', 'id': 499, 'synset': 'giant_panda.n.01', 'synonyms': ['giant_panda', 'panda', 'panda_bear'], 'def': 'large black-and-white herbivorous mammal of bamboo forests of China and Tibet', 'name': 'giant_panda'}, {'frequency': 'c', 'id': 500, 'synset': 'gift_wrap.n.01', 'synonyms': ['gift_wrap'], 'def': 'attractive wrapping paper suitable for wrapping gifts', 'name': 'gift_wrap'}, {'frequency': 'c', 'id': 501, 'synset': 'ginger.n.03', 'synonyms': ['ginger', 'gingerroot'], 'def': 'the root of the common ginger plant; used fresh as a seasoning', 'name': 'ginger'}, {'frequency': 'f', 'id': 502, 'synset': 'giraffe.n.01', 'synonyms': ['giraffe'], 'def': 'tall animal having a spotted coat and small horns and very long neck and legs', 'name': 'giraffe'}, {'frequency': 'c', 'id': 503, 'synset': 'girdle.n.02', 'synonyms': ['cincture', 'sash', 'waistband', 'waistcloth'], 'def': 'a band of material around the waist that strengthens a skirt or trousers', 'name': 'cincture'}, {'frequency': 'f', 'id': 504, 'synset': 'glass.n.02', 'synonyms': ['glass_(drink_container)', 'drinking_glass'], 'def': 'a container for holding liquids while drinking', 'name': 'glass_(drink_container)'}, {'frequency': 'c', 'id': 505, 'synset': 'globe.n.03', 'synonyms': ['globe'], 'def': 'a sphere on which a map (especially of the earth) is represented', 'name': 'globe'}, {'frequency': 'f', 'id': 506, 'synset': 'glove.n.02', 'synonyms': ['glove'], 'def': 'handwear covering the hand', 'name': 'glove'}, {'frequency': 'c', 'id': 507, 'synset': 'goat.n.01', 'synonyms': ['goat'], 'def': 'a common goat', 'name': 'goat'}, {'frequency': 'f', 'id': 508, 'synset': 'goggles.n.01', 'synonyms': ['goggles'], 'def': 'tight-fitting spectacles worn to protect the eyes', 'name': 'goggles'}, {'frequency': 'r', 'id': 509, 'synset': 'goldfish.n.01', 'synonyms': ['goldfish'], 'def': 'small golden or orange-red freshwater fishes used as pond or aquarium pets', 'name': 'goldfish'}, {'frequency': 'r', 'id': 510, 'synset': 'golf_club.n.02', 'synonyms': ['golf_club', 'golf-club'], 'def': 'golf equipment used by a golfer to hit a golf ball', 'name': 'golf_club'}, {'frequency': 'c', 'id': 511, 'synset': 'golfcart.n.01', 'synonyms': ['golfcart'], 'def': 'a small motor vehicle in which golfers can ride between shots', 'name': 'golfcart'}, {'frequency': 'r', 'id': 512, 'synset': 'gondola.n.02', 'synonyms': ['gondola_(boat)'], 'def': 'long narrow flat-bottomed boat propelled by 
sculling; traditionally used on canals of Venice', 'name': 'gondola_(boat)'}, {'frequency': 'c', 'id': 513, 'synset': 'goose.n.01', 'synonyms': ['goose'], 'def': 'loud, web-footed long-necked aquatic birds usually larger than ducks', 'name': 'goose'}, {'frequency': 'r', 'id': 514, 'synset': 'gorilla.n.01', 'synonyms': ['gorilla'], 'def': 'largest ape', 'name': 'gorilla'}, {'frequency': 'r', 'id': 515, 'synset': 'gourd.n.02', 'synonyms': ['gourd'], 'def': 'any of numerous inedible fruits with hard rinds', 'name': 'gourd'}, {'frequency': 'r', 'id': 516, 'synset': 'gown.n.04', 'synonyms': ['surgical_gown', 'scrubs_(surgical_clothing)'], 'def': 'protective garment worn by surgeons during operations', 'name': 'surgical_gown'}, {'frequency': 'f', 'id': 517, 'synset': 'grape.n.01', 'synonyms': ['grape'], 'def': 'any of various juicy fruit with green or purple skins; grow in clusters', 'name': 'grape'}, {'frequency': 'r', 'id': 518, 'synset': 'grasshopper.n.01', 'synonyms': ['grasshopper'], 'def': 'plant-eating insect with hind legs adapted for leaping', 'name': 'grasshopper'}, {'frequency': 'c', 'id': 519, 'synset': 'grater.n.01', 'synonyms': ['grater'], 'def': 'utensil with sharp perforations for shredding foods (as vegetables or cheese)', 'name': 'grater'}, {'frequency': 'c', 'id': 520, 'synset': 'gravestone.n.01', 'synonyms': ['gravestone', 'headstone', 'tombstone'], 'def': 'a stone that is used to mark a grave', 'name': 'gravestone'}, {'frequency': 'r', 'id': 521, 'synset': 'gravy_boat.n.01', 'synonyms': ['gravy_boat', 'gravy_holder'], 'def': 'a dish (often boat-shaped) for serving gravy or sauce', 'name': 'gravy_boat'}, {'frequency': 'c', 'id': 522, 'synset': 'green_bean.n.02', 'synonyms': ['green_bean'], 'def': 'a common bean plant cultivated for its slender green edible pods', 'name': 'green_bean'}, {'frequency': 'c', 'id': 523, 'synset': 'green_onion.n.01', 'synonyms': ['green_onion', 'spring_onion', 'scallion'], 'def': 'a young onion before the bulb has enlarged', 'name': 'green_onion'}, {'frequency': 'r', 'id': 524, 'synset': 'griddle.n.01', 'synonyms': ['griddle'], 'def': 'cooking utensil consisting of a flat heated surface on which food is cooked', 'name': 'griddle'}, {'frequency': 'r', 'id': 525, 'synset': 'grillroom.n.01', 'synonyms': ['grillroom', 'grill_(restaurant)'], 'def': 'a restaurant where food is cooked on a grill', 'name': 'grillroom'}, {'frequency': 'r', 'id': 526, 'synset': 'grinder.n.04', 'synonyms': ['grinder_(tool)'], 'def': 'a machine tool that polishes metal', 'name': 'grinder_(tool)'}, {'frequency': 'r', 'id': 527, 'synset': 'grits.n.01', 'synonyms': ['grits', 'hominy_grits'], 'def': 'coarsely ground corn boiled as a breakfast dish', 'name': 'grits'}, {'frequency': 'c', 'id': 528, 'synset': 'grizzly.n.01', 'synonyms': ['grizzly', 'grizzly_bear'], 'def': 'powerful brownish-yellow bear of the uplands of western North America', 'name': 'grizzly'}, {'frequency': 'c', 'id': 529, 'synset': 'grocery_bag.n.01', 'synonyms': ['grocery_bag'], 'def': "a sack for holding a customer's groceries", 'name': 'grocery_bag'}, {'frequency': 'r', 'id': 530, 'synset': 'guacamole.n.01', 'synonyms': ['guacamole'], 'def': 'a dip made of mashed avocado mixed with chopped onions and other seasonings', 'name': 'guacamole'}, {'frequency': 'f', 'id': 531, 'synset': 'guitar.n.01', 'synonyms': ['guitar'], 'def': 'a stringed instrument usually having six strings; played by strumming or plucking', 'name': 'guitar'}, {'frequency': 'c', 'id': 532, 'synset': 'gull.n.02', 'synonyms': ['gull', 'seagull'], 
'def': 'mostly white aquatic bird having long pointed wings and short legs', 'name': 'gull'}, {'frequency': 'c', 'id': 533, 'synset': 'gun.n.01', 'synonyms': ['gun'], 'def': 'a weapon that discharges a bullet at high velocity from a metal tube', 'name': 'gun'}, {'frequency': 'r', 'id': 534, 'synset': 'hair_spray.n.01', 'synonyms': ['hair_spray'], 'def': 'substance sprayed on the hair to hold it in place', 'name': 'hair_spray'}, {'frequency': 'c', 'id': 535, 'synset': 'hairbrush.n.01', 'synonyms': ['hairbrush'], 'def': "a brush used to groom a person's hair", 'name': 'hairbrush'}, {'frequency': 'c', 'id': 536, 'synset': 'hairnet.n.01', 'synonyms': ['hairnet'], 'def': 'a small net that someone wears over their hair to keep it in place', 'name': 'hairnet'}, {'frequency': 'c', 'id': 537, 'synset': 'hairpin.n.01', 'synonyms': ['hairpin'], 'def': "a double pronged pin used to hold women's hair in place", 'name': 'hairpin'}, {'frequency': 'f', 'id': 538, 'synset': 'ham.n.01', 'synonyms': ['ham', 'jambon', 'gammon'], 'def': 'meat cut from the thigh of a hog (usually smoked)', 'name': 'ham'}, {'frequency': 'c', 'id': 539, 'synset': 'hamburger.n.01', 'synonyms': ['hamburger', 'beefburger', 'burger'], 'def': 'a sandwich consisting of a patty of minced beef served on a bun', 'name': 'hamburger'}, {'frequency': 'c', 'id': 540, 'synset': 'hammer.n.02', 'synonyms': ['hammer'], 'def': 'a hand tool with a heavy head and a handle; used to deliver an impulsive force by striking', 'name': 'hammer'}, {'frequency': 'r', 'id': 541, 'synset': 'hammock.n.02', 'synonyms': ['hammock'], 'def': 'a hanging bed of canvas or rope netting (usually suspended between two trees)', 'name': 'hammock'}, {'frequency': 'r', 'id': 542, 'synset': 'hamper.n.02', 'synonyms': ['hamper'], 'def': 'a basket usually with a cover', 'name': 'hamper'}, {'frequency': 'r', 'id': 543, 'synset': 'hamster.n.01', 'synonyms': ['hamster'], 'def': 'short-tailed burrowing rodent with large cheek pouches', 'name': 'hamster'}, {'frequency': 'c', 'id': 544, 'synset': 'hand_blower.n.01', 'synonyms': ['hair_dryer'], 'def': 'a hand-held electric blower that can blow warm air onto the hair', 'name': 'hair_dryer'}, {'frequency': 'r', 'id': 545, 'synset': 'hand_glass.n.01', 'synonyms': ['hand_glass', 'hand_mirror'], 'def': 'a mirror intended to be held in the hand', 'name': 'hand_glass'}, {'frequency': 'f', 'id': 546, 'synset': 'hand_towel.n.01', 'synonyms': ['hand_towel', 'face_towel'], 'def': 'a small towel used to dry the hands or face', 'name': 'hand_towel'}, {'frequency': 'c', 'id': 547, 'synset': 'handcart.n.01', 'synonyms': ['handcart', 'pushcart', 'hand_truck'], 'def': 'wheeled vehicle that can be pushed by a person', 'name': 'handcart'}, {'frequency': 'r', 'id': 548, 'synset': 'handcuff.n.01', 'synonyms': ['handcuff'], 'def': 'shackle that consists of a metal loop that can be locked around the wrist', 'name': 'handcuff'}, {'frequency': 'c', 'id': 549, 'synset': 'handkerchief.n.01', 'synonyms': ['handkerchief'], 'def': 'a square piece of cloth used for wiping the eyes or nose or as a costume accessory', 'name': 'handkerchief'}, {'frequency': 'f', 'id': 550, 'synset': 'handle.n.01', 'synonyms': ['handle', 'grip', 'handgrip'], 'def': 'the appendage to an object that is designed to be held in order to use or move it', 'name': 'handle'}, {'frequency': 'r', 'id': 551, 'synset': 'handsaw.n.01', 'synonyms': ['handsaw', "carpenter's_saw"], 'def': 'a saw used with one hand for cutting wood', 'name': 'handsaw'}, {'frequency': 'r', 'id': 552, 'synset': 
'hardback.n.01', 'synonyms': ['hardback_book', 'hardcover_book'], 'def': 'a book with cardboard or cloth or leather covers', 'name': 'hardback_book'}, {'frequency': 'r', 'id': 553, 'synset': 'harmonium.n.01', 'synonyms': ['harmonium', 'organ_(musical_instrument)', 'reed_organ_(musical_instrument)'], 'def': 'a free-reed instrument in which air is forced through the reeds by bellows', 'name': 'harmonium'}, {'frequency': 'f', 'id': 554, 'synset': 'hat.n.01', 'synonyms': ['hat'], 'def': 'headwear that protects the head from bad weather or sun, or is worn for fashion', 'name': 'hat'}, {'frequency': 'r', 'id': 555, 'synset': 'hatbox.n.01', 'synonyms': ['hatbox'], 'def': 'a round piece of luggage for carrying hats', 'name': 'hatbox'}, {'frequency': 'r', 'id': 556, 'synset': 'hatch.n.03', 'synonyms': ['hatch'], 'def': 'a movable barrier covering a hatchway', 'name': 'hatch'}, {'frequency': 'c', 'id': 557, 'synset': 'head_covering.n.01', 'synonyms': ['veil'], 'def': 'a garment that covers the head and face', 'name': 'veil'}, {'frequency': 'f', 'id': 558, 'synset': 'headband.n.01', 'synonyms': ['headband'], 'def': 'a band worn around or over the head', 'name': 'headband'}, {'frequency': 'f', 'id': 559, 'synset': 'headboard.n.01', 'synonyms': ['headboard'], 'def': 'a vertical board or panel forming the head of a bedstead', 'name': 'headboard'}, {'frequency': 'f', 'id': 560, 'synset': 'headlight.n.01', 'synonyms': ['headlight', 'headlamp'], 'def': 'a powerful light with reflector; attached to the front of an automobile or locomotive', 'name': 'headlight'}, {'frequency': 'c', 'id': 561, 'synset': 'headscarf.n.01', 'synonyms': ['headscarf'], 'def': 'a kerchief worn over the head and tied under the chin', 'name': 'headscarf'}, {'frequency': 'r', 'id': 562, 'synset': 'headset.n.01', 'synonyms': ['headset'], 'def': 'receiver consisting of a pair of headphones', 'name': 'headset'}, {'frequency': 'c', 'id': 563, 'synset': 'headstall.n.01', 'synonyms': ['headstall_(for_horses)', 'headpiece_(for_horses)'], 'def': "the band that is the part of a bridle that fits around a horse's head", 'name': 'headstall_(for_horses)'}, {'frequency': 'r', 'id': 564, 'synset': 'hearing_aid.n.02', 'synonyms': ['hearing_aid'], 'def': 'an acoustic device used to direct sound to the ear of a hearing-impaired person', 'name': 'hearing_aid'}, {'frequency': 'c', 'id': 565, 'synset': 'heart.n.02', 'synonyms': ['heart'], 'def': 'a muscular organ; its contractions move the blood through the body', 'name': 'heart'}, {'frequency': 'c', 'id': 566, 'synset': 'heater.n.01', 'synonyms': ['heater', 'warmer'], 'def': 'device that heats water or supplies warmth to a room', 'name': 'heater'}, {'frequency': 'c', 'id': 567, 'synset': 'helicopter.n.01', 'synonyms': ['helicopter'], 'def': 'an aircraft without wings that obtains its lift from the rotation of overhead blades', 'name': 'helicopter'}, {'frequency': 'f', 'id': 568, 'synset': 'helmet.n.02', 'synonyms': ['helmet'], 'def': 'protective headgear made of hard material to resist blows', 'name': 'helmet'}, {'frequency': 'r', 'id': 569, 'synset': 'heron.n.02', 'synonyms': ['heron'], 'def': 'grey or white wading bird with long neck and long legs and (usually) long bill', 'name': 'heron'}, {'frequency': 'c', 'id': 570, 'synset': 'highchair.n.01', 'synonyms': ['highchair', 'feeding_chair'], 'def': 'a chair for feeding a very young child', 'name': 'highchair'}, {'frequency': 'f', 'id': 571, 'synset': 'hinge.n.01', 'synonyms': ['hinge'], 'def': 'a joint that holds two parts together so that one can swing 
relative to the other', 'name': 'hinge'}, {'frequency': 'r', 'id': 572, 'synset': 'hippopotamus.n.01', 'synonyms': ['hippopotamus'], 'def': 'massive thick-skinned animal living in or around rivers of tropical Africa', 'name': 'hippopotamus'}, {'frequency': 'r', 'id': 573, 'synset': 'hockey_stick.n.01', 'synonyms': ['hockey_stick'], 'def': 'sports implement consisting of a stick used by hockey players to move the puck', 'name': 'hockey_stick'}, {'frequency': 'c', 'id': 574, 'synset': 'hog.n.03', 'synonyms': ['hog', 'pig'], 'def': 'domestic swine', 'name': 'hog'}, {'frequency': 'f', 'id': 575, 'synset': 'home_plate.n.01', 'synonyms': ['home_plate_(baseball)', 'home_base_(baseball)'], 'def': '(baseball) a rubber slab where the batter stands; it must be touched by a base runner in order to score', 'name': 'home_plate_(baseball)'}, {'frequency': 'c', 'id': 576, 'synset': 'honey.n.01', 'synonyms': ['honey'], 'def': 'a sweet yellow liquid produced by bees', 'name': 'honey'}, {'frequency': 'f', 'id': 577, 'synset': 'hood.n.06', 'synonyms': ['fume_hood', 'exhaust_hood'], 'def': 'metal covering leading to a vent that exhausts smoke or fumes', 'name': 'fume_hood'}, {'frequency': 'f', 'id': 578, 'synset': 'hook.n.05', 'synonyms': ['hook'], 'def': 'a curved or bent implement for suspending or pulling something', 'name': 'hook'}, {'frequency': 'f', 'id': 579, 'synset': 'horse.n.01', 'synonyms': ['horse'], 'def': 'a common horse', 'name': 'horse'}, {'frequency': 'f', 'id': 580, 'synset': 'hose.n.03', 'synonyms': ['hose', 'hosepipe'], 'def': 'a flexible pipe for conveying a liquid or gas', 'name': 'hose'}, {'frequency': 'r', 'id': 581, 'synset': 'hot-air_balloon.n.01', 'synonyms': ['hot-air_balloon'], 'def': 'balloon for travel through the air in a basket suspended below a large bag of heated air', 'name': 'hot-air_balloon'}, {'frequency': 'r', 'id': 582, 'synset': 'hot_plate.n.01', 'synonyms': ['hotplate'], 'def': 'a portable electric appliance for heating or cooking or keeping food warm', 'name': 'hotplate'}, {'frequency': 'c', 'id': 583, 'synset': 'hot_sauce.n.01', 'synonyms': ['hot_sauce'], 'def': 'a pungent peppery sauce', 'name': 'hot_sauce'}, {'frequency': 'r', 'id': 584, 'synset': 'hourglass.n.01', 'synonyms': ['hourglass'], 'def': 'a sandglass timer that runs for sixty minutes', 'name': 'hourglass'}, {'frequency': 'r', 'id': 585, 'synset': 'houseboat.n.01', 'synonyms': ['houseboat'], 'def': 'a barge that is designed and equipped for use as a dwelling', 'name': 'houseboat'}, {'frequency': 'r', 'id': 586, 'synset': 'hummingbird.n.01', 'synonyms': ['hummingbird'], 'def': 'tiny American bird having brilliant iridescent plumage and a long slender bill', 'name': 'hummingbird'}, {'frequency': 'r', 'id': 587, 'synset': 'hummus.n.01', 'synonyms': ['hummus', 'humus', 'hommos', 'hoummos', 'humous'], 'def': 'a thick spread made from mashed chickpeas', 'name': 'hummus'}, {'frequency': 'c', 'id': 588, 'synset': 'ice_bear.n.01', 'synonyms': ['polar_bear'], 'def': 'white bear of Arctic regions', 'name': 'polar_bear'}, {'frequency': 'c', 'id': 589, 'synset': 'ice_cream.n.01', 'synonyms': ['icecream'], 'def': 'frozen dessert containing cream and sugar and flavoring', 'name': 'icecream'}, {'frequency': 'r', 'id': 590, 'synset': 'ice_lolly.n.01', 'synonyms': ['popsicle'], 'def': 'ice cream or water ice on a small wooden stick', 'name': 'popsicle'}, {'frequency': 'c', 'id': 591, 'synset': 'ice_maker.n.01', 'synonyms': ['ice_maker'], 'def': 'an appliance included in some electric refrigerators for making ice cubes', 
'name': 'ice_maker'}, {'frequency': 'r', 'id': 592, 'synset': 'ice_pack.n.01', 'synonyms': ['ice_pack', 'ice_bag'], 'def': 'a waterproof bag filled with ice: applied to the body (especially the head) to cool or reduce swelling', 'name': 'ice_pack'}, {'frequency': 'r', 'id': 593, 'synset': 'ice_skate.n.01', 'synonyms': ['ice_skate'], 'def': 'skate consisting of a boot with a steel blade fitted to the sole', 'name': 'ice_skate'}, {'frequency': 'r', 'id': 594, 'synset': 'ice_tea.n.01', 'synonyms': ['ice_tea', 'iced_tea'], 'def': 'strong tea served over ice', 'name': 'ice_tea'}, {'frequency': 'c', 'id': 595, 'synset': 'igniter.n.01', 'synonyms': ['igniter', 'ignitor', 'lighter'], 'def': 'a substance or device used to start a fire', 'name': 'igniter'}, {'frequency': 'r', 'id': 596, 'synset': 'incense.n.01', 'synonyms': ['incense'], 'def': 'a substance that produces a fragrant odor when burned', 'name': 'incense'}, {'frequency': 'r', 'id': 597, 'synset': 'inhaler.n.01', 'synonyms': ['inhaler', 'inhalator'], 'def': 'a dispenser that produces a chemical vapor to be inhaled through mouth or nose', 'name': 'inhaler'}, {'frequency': 'c', 'id': 598, 'synset': 'ipod.n.01', 'synonyms': ['iPod'], 'def': 'a pocket-sized device used to play music files', 'name': 'iPod'}, {'frequency': 'c', 'id': 599, 'synset': 'iron.n.04', 'synonyms': ['iron_(for_clothing)', 'smoothing_iron_(for_clothing)'], 'def': 'home appliance consisting of a flat metal base that is heated and used to smooth cloth', 'name': 'iron_(for_clothing)'}, {'frequency': 'r', 'id': 600, 'synset': 'ironing_board.n.01', 'synonyms': ['ironing_board'], 'def': 'narrow padded board on collapsible supports; used for ironing clothes', 'name': 'ironing_board'}, {'frequency': 'f', 'id': 601, 'synset': 'jacket.n.01', 'synonyms': ['jacket'], 'def': 'a waist-length coat', 'name': 'jacket'}, {'frequency': 'r', 'id': 602, 'synset': 'jam.n.01', 'synonyms': ['jam'], 'def': 'preserve of crushed fruit', 'name': 'jam'}, {'frequency': 'f', 'id': 603, 'synset': 'jean.n.01', 'synonyms': ['jean', 'blue_jean', 'denim'], 'def': '(usually plural) close-fitting trousers of heavy denim for manual work or casual wear', 'name': 'jean'}, {'frequency': 'c', 'id': 604, 'synset': 'jeep.n.01', 'synonyms': ['jeep', 'landrover'], 'def': 'a car suitable for traveling over rough terrain', 'name': 'jeep'}, {'frequency': 'r', 'id': 605, 'synset': 'jelly_bean.n.01', 'synonyms': ['jelly_bean', 'jelly_egg'], 'def': 'sugar-glazed jellied candy', 'name': 'jelly_bean'}, {'frequency': 'f', 'id': 606, 'synset': 'jersey.n.03', 'synonyms': ['jersey', 'T-shirt', 'tee_shirt'], 'def': 'a close-fitting pullover shirt', 'name': 'jersey'}, {'frequency': 'c', 'id': 607, 'synset': 'jet.n.01', 'synonyms': ['jet_plane', 'jet-propelled_plane'], 'def': 'an airplane powered by one or more jet engines', 'name': 'jet_plane'}, {'frequency': 'c', 'id': 608, 'synset': 'jewelry.n.01', 'synonyms': ['jewelry', 'jewellery'], 'def': 'an adornment (as a bracelet or ring or necklace) made of precious metals and set with gems (or imitation gems)', 'name': 'jewelry'}, {'frequency': 'r', 'id': 609, 'synset': 'joystick.n.02', 'synonyms': ['joystick'], 'def': 'a control device for computers consisting of a vertical handle that can move freely in two directions', 'name': 'joystick'}, {'frequency': 'r', 'id': 610, 'synset': 'jump_suit.n.01', 'synonyms': ['jumpsuit'], 'def': "one-piece garment fashioned after a parachutist's uniform", 'name': 'jumpsuit'}, {'frequency': 'c', 'id': 611, 'synset': 'kayak.n.01', 'synonyms': 
['kayak'], 'def': 'a small canoe consisting of a light frame made watertight with animal skins', 'name': 'kayak'}, {'frequency': 'r', 'id': 612, 'synset': 'keg.n.02', 'synonyms': ['keg'], 'def': 'small cask or barrel', 'name': 'keg'}, {'frequency': 'r', 'id': 613, 'synset': 'kennel.n.01', 'synonyms': ['kennel', 'doghouse'], 'def': 'outbuilding that serves as a shelter for a dog', 'name': 'kennel'}, {'frequency': 'c', 'id': 614, 'synset': 'kettle.n.01', 'synonyms': ['kettle', 'boiler'], 'def': 'a metal pot for stewing or boiling; usually has a lid', 'name': 'kettle'}, {'frequency': 'f', 'id': 615, 'synset': 'key.n.01', 'synonyms': ['key'], 'def': 'metal instrument used to unlock a lock', 'name': 'key'}, {'frequency': 'r', 'id': 616, 'synset': 'keycard.n.01', 'synonyms': ['keycard'], 'def': 'a plastic card used to gain access typically to a door', 'name': 'keycard'}, {'frequency': 'r', 'id': 617, 'synset': 'kilt.n.01', 'synonyms': ['kilt'], 'def': 'a knee-length pleated tartan skirt worn by men as part of the traditional dress in the Highlands of northern Scotland', 'name': 'kilt'}, {'frequency': 'c', 'id': 618, 'synset': 'kimono.n.01', 'synonyms': ['kimono'], 'def': 'a loose robe; imitated from robes originally worn by Japanese', 'name': 'kimono'}, {'frequency': 'f', 'id': 619, 'synset': 'kitchen_sink.n.01', 'synonyms': ['kitchen_sink'], 'def': 'a sink in a kitchen', 'name': 'kitchen_sink'}, {'frequency': 'c', 'id': 620, 'synset': 'kitchen_table.n.01', 'synonyms': ['kitchen_table'], 'def': 'a table in the kitchen', 'name': 'kitchen_table'}, {'frequency': 'f', 'id': 621, 'synset': 'kite.n.03', 'synonyms': ['kite'], 'def': 'plaything consisting of a light frame covered with tissue paper; flown in wind at end of a string', 'name': 'kite'}, {'frequency': 'c', 'id': 622, 'synset': 'kitten.n.01', 'synonyms': ['kitten', 'kitty'], 'def': 'young domestic cat', 'name': 'kitten'}, {'frequency': 'c', 'id': 623, 'synset': 'kiwi.n.03', 'synonyms': ['kiwi_fruit'], 'def': 'fuzzy brown egg-shaped fruit with slightly tart green flesh', 'name': 'kiwi_fruit'}, {'frequency': 'f', 'id': 624, 'synset': 'knee_pad.n.01', 'synonyms': ['knee_pad'], 'def': 'protective garment consisting of a pad worn by football or baseball or hockey players', 'name': 'knee_pad'}, {'frequency': 'f', 'id': 625, 'synset': 'knife.n.01', 'synonyms': ['knife'], 'def': 'tool with a blade and point used as a cutting instrument', 'name': 'knife'}, {'frequency': 'r', 'id': 626, 'synset': 'knight.n.02', 'synonyms': ['knight_(chess_piece)', 'horse_(chess_piece)'], 'def': 'a chess game piece shaped to resemble the head of a horse', 'name': 'knight_(chess_piece)'}, {'frequency': 'r', 'id': 627, 'synset': 'knitting_needle.n.01', 'synonyms': ['knitting_needle'], 'def': 'needle consisting of a slender rod with pointed ends; usually used in pairs', 'name': 'knitting_needle'}, {'frequency': 'f', 'id': 628, 'synset': 'knob.n.02', 'synonyms': ['knob'], 'def': 'a round handle often found on a door', 'name': 'knob'}, {'frequency': 'r', 'id': 629, 'synset': 'knocker.n.05', 'synonyms': ['knocker_(on_a_door)', 'doorknocker'], 'def': 'a device (usually metal and ornamental) attached by a hinge to a door', 'name': 'knocker_(on_a_door)'}, {'frequency': 'r', 'id': 630, 'synset': 'koala.n.01', 'synonyms': ['koala', 'koala_bear'], 'def': 'sluggish tailless Australian marsupial with grey furry ears and coat', 'name': 'koala'}, {'frequency': 'r', 'id': 631, 'synset': 'lab_coat.n.01', 'synonyms': ['lab_coat', 'laboratory_coat'], 'def': 'a light coat worn to protect 
clothing from substances used while working in a laboratory', 'name': 'lab_coat'}, {'frequency': 'f', 'id': 632, 'synset': 'ladder.n.01', 'synonyms': ['ladder'], 'def': 'steps consisting of two parallel members connected by rungs', 'name': 'ladder'}, {'frequency': 'c', 'id': 633, 'synset': 'ladle.n.01', 'synonyms': ['ladle'], 'def': 'a spoon-shaped vessel with a long handle frequently used to transfer liquids', 'name': 'ladle'}, {'frequency': 'r', 'id': 634, 'synset': 'ladybug.n.01', 'synonyms': ['ladybug', 'ladybeetle', 'ladybird_beetle'], 'def': 'small round bright-colored and spotted beetle, typically red and black', 'name': 'ladybug'}, {'frequency': 'c', 'id': 635, 'synset': 'lamb.n.01', 'synonyms': ['lamb_(animal)'], 'def': 'young sheep', 'name': 'lamb_(animal)'}, {'frequency': 'r', 'id': 636, 'synset': 'lamb_chop.n.01', 'synonyms': ['lamb-chop', 'lambchop'], 'def': 'chop cut from a lamb', 'name': 'lamb-chop'}, {'frequency': 'f', 'id': 637, 'synset': 'lamp.n.02', 'synonyms': ['lamp'], 'def': 'a piece of furniture holding one or more electric light bulbs', 'name': 'lamp'}, {'frequency': 'f', 'id': 638, 'synset': 'lamppost.n.01', 'synonyms': ['lamppost'], 'def': 'a metal post supporting an outdoor lamp (such as a streetlight)', 'name': 'lamppost'}, {'frequency': 'f', 'id': 639, 'synset': 'lampshade.n.01', 'synonyms': ['lampshade'], 'def': 'a protective ornamental shade used to screen a light bulb from direct view', 'name': 'lampshade'}, {'frequency': 'c', 'id': 640, 'synset': 'lantern.n.01', 'synonyms': ['lantern'], 'def': 'light in a transparent protective case', 'name': 'lantern'}, {'frequency': 'f', 'id': 641, 'synset': 'lanyard.n.02', 'synonyms': ['lanyard', 'laniard'], 'def': 'a cord worn around the neck to hold a knife or whistle, etc.', 'name': 'lanyard'}, {'frequency': 'f', 'id': 642, 'synset': 'laptop.n.01', 'synonyms': ['laptop_computer', 'notebook_computer'], 'def': 'a portable computer small enough to use in your lap', 'name': 'laptop_computer'}, {'frequency': 'r', 'id': 643, 'synset': 'lasagna.n.01', 'synonyms': ['lasagna', 'lasagne'], 'def': 'baked dish of layers of lasagna pasta with sauce and cheese and meat or vegetables', 'name': 'lasagna'}, {'frequency': 'c', 'id': 644, 'synset': 'latch.n.02', 'synonyms': ['latch'], 'def': 'a bar that can be lowered or slid into a groove to fasten a door or gate', 'name': 'latch'}, {'frequency': 'r', 'id': 645, 'synset': 'lawn_mower.n.01', 'synonyms': ['lawn_mower'], 'def': 'garden tool for mowing grass on lawns', 'name': 'lawn_mower'}, {'frequency': 'r', 'id': 646, 'synset': 'leather.n.01', 'synonyms': ['leather'], 'def': 'an animal skin made smooth and flexible by removing the hair and then tanning', 'name': 'leather'}, {'frequency': 'c', 'id': 647, 'synset': 'legging.n.01', 'synonyms': ['legging_(clothing)', 'leging_(clothing)', 'leg_covering'], 'def': 'a garment covering the leg (usually extending from the knee to the ankle)', 'name': 'legging_(clothing)'}, {'frequency': 'c', 'id': 648, 'synset': 'lego.n.01', 'synonyms': ['Lego', 'Lego_set'], 'def': "a child's plastic construction set for making models from blocks", 'name': 'Lego'}, {'frequency': 'f', 'id': 649, 'synset': 'lemon.n.01', 'synonyms': ['lemon'], 'def': 'yellow oval fruit with juicy acidic flesh', 'name': 'lemon'}, {'frequency': 'r', 'id': 650, 'synset': 'lemonade.n.01', 'synonyms': ['lemonade'], 'def': 'sweetened beverage of diluted lemon juice', 'name': 'lemonade'}, {'frequency': 'f', 'id': 651, 'synset': 'lettuce.n.02', 'synonyms': ['lettuce'], 'def': 'leafy plant 
commonly eaten in salad or on sandwiches', 'name': 'lettuce'}, {'frequency': 'f', 'id': 652, 'synset': 'license_plate.n.01', 'synonyms': ['license_plate', 'numberplate'], 'def': "a plate mounted on the front and back of a car and bearing the car's registration number", 'name': 'license_plate'}, {'frequency': 'f', 'id': 653, 'synset': 'life_buoy.n.01', 'synonyms': ['life_buoy', 'lifesaver', 'life_belt', 'life_ring'], 'def': 'a ring-shaped life preserver used to prevent drowning (NOT a life-jacket or vest)', 'name': 'life_buoy'}, {'frequency': 'f', 'id': 654, 'synset': 'life_jacket.n.01', 'synonyms': ['life_jacket', 'life_vest'], 'def': 'life preserver consisting of a sleeveless jacket of buoyant or inflatable design', 'name': 'life_jacket'}, {'frequency': 'f', 'id': 655, 'synset': 'light_bulb.n.01', 'synonyms': ['lightbulb'], 'def': 'glass bulb or tube shaped electric device that emits light (DO NOT MARK LAMPS AS A WHOLE)', 'name': 'lightbulb'}, {'frequency': 'r', 'id': 656, 'synset': 'lightning_rod.n.02', 'synonyms': ['lightning_rod', 'lightning_conductor'], 'def': 'a metallic conductor that is attached to a high point and leads to the ground', 'name': 'lightning_rod'}, {'frequency': 'c', 'id': 657, 'synset': 'lime.n.06', 'synonyms': ['lime'], 'def': 'the green acidic fruit of any of various lime trees', 'name': 'lime'}, {'frequency': 'r', 'id': 658, 'synset': 'limousine.n.01', 'synonyms': ['limousine'], 'def': 'long luxurious car; usually driven by a chauffeur', 'name': 'limousine'}, {'frequency': 'r', 'id': 659, 'synset': 'linen.n.02', 'synonyms': ['linen_paper'], 'def': 'a high-quality paper made of linen fibers or with a linen finish', 'name': 'linen_paper'}, {'frequency': 'c', 'id': 660, 'synset': 'lion.n.01', 'synonyms': ['lion'], 'def': 'large gregarious predatory cat of Africa and India', 'name': 'lion'}, {'frequency': 'c', 'id': 661, 'synset': 'lip_balm.n.01', 'synonyms': ['lip_balm'], 'def': 'a balm applied to the lips', 'name': 'lip_balm'}, {'frequency': 'c', 'id': 662, 'synset': 'lipstick.n.01', 'synonyms': ['lipstick', 'lip_rouge'], 'def': 'makeup that is used to color the lips', 'name': 'lipstick'}, {'frequency': 'r', 'id': 663, 'synset': 'liquor.n.01', 'synonyms': ['liquor', 'spirits', 'hard_liquor', 'liqueur', 'cordial'], 'def': 'an alcoholic beverage that is distilled rather than fermented', 'name': 'liquor'}, {'frequency': 'r', 'id': 664, 'synset': 'lizard.n.01', 'synonyms': ['lizard'], 'def': 'a reptile with usually two pairs of legs and a tapering tail', 'name': 'lizard'}, {'frequency': 'r', 'id': 665, 'synset': 'loafer.n.02', 'synonyms': ['Loafer_(type_of_shoe)'], 'def': 'a low leather step-in shoe', 'name': 'Loafer_(type_of_shoe)'}, {'frequency': 'f', 'id': 666, 'synset': 'log.n.01', 'synonyms': ['log'], 'def': 'a segment of the trunk of a tree when stripped of branches', 'name': 'log'}, {'frequency': 'c', 'id': 667, 'synset': 'lollipop.n.02', 'synonyms': ['lollipop'], 'def': 'hard candy on a stick', 'name': 'lollipop'}, {'frequency': 'c', 'id': 668, 'synset': 'lotion.n.01', 'synonyms': ['lotion'], 'def': 'any of various cosmetic preparations that are applied to the skin', 'name': 'lotion'}, {'frequency': 'f', 'id': 669, 'synset': 'loudspeaker.n.01', 'synonyms': ['speaker_(stero_equipment)'], 'def': 'electronic device that produces sound often as part of a stereo system', 'name': 'speaker_(stero_equipment)'}, {'frequency': 'c', 'id': 670, 'synset': 'love_seat.n.01', 'synonyms': ['loveseat'], 'def': 'small sofa that seats two people', 'name': 'loveseat'}, {'frequency': 
'r', 'id': 671, 'synset': 'machine_gun.n.01', 'synonyms': ['machine_gun'], 'def': 'a rapidly firing automatic gun', 'name': 'machine_gun'}, {'frequency': 'f', 'id': 672, 'synset': 'magazine.n.02', 'synonyms': ['magazine'], 'def': 'a paperback periodic publication', 'name': 'magazine'}, {'frequency': 'f', 'id': 673, 'synset': 'magnet.n.01', 'synonyms': ['magnet'], 'def': 'a device that attracts iron and produces a magnetic field', 'name': 'magnet'}, {'frequency': 'r', 'id': 674, 'synset': 'mail_slot.n.01', 'synonyms': ['mail_slot'], 'def': 'a slot (usually in a door) through which mail can be delivered', 'name': 'mail_slot'}, {'frequency': 'c', 'id': 675, 'synset': 'mailbox.n.01', 'synonyms': ['mailbox_(at_home)', 'letter_box_(at_home)'], 'def': 'a private box for delivery of mail', 'name': 'mailbox_(at_home)'}, {'frequency': 'r', 'id': 676, 'synset': 'mallet.n.01', 'synonyms': ['mallet'], 'def': 'a sports implement with a long handle and a hammer-like head used to hit a ball', 'name': 'mallet'}, {'frequency': 'r', 'id': 677, 'synset': 'mammoth.n.01', 'synonyms': ['mammoth'], 'def': 'any of numerous extinct elephants widely distributed in the Pleistocene', 'name': 'mammoth'}, {'frequency': 'c', 'id': 678, 'synset': 'mandarin.n.05', 'synonyms': ['mandarin_orange'], 'def': 'a somewhat flat reddish-orange loose skinned citrus of China', 'name': 'mandarin_orange'}, {'frequency': 'c', 'id': 679, 'synset': 'manger.n.01', 'synonyms': ['manger', 'trough'], 'def': 'a container (usually in a barn or stable) from which cattle or horses feed', 'name': 'manger'}, {'frequency': 'f', 'id': 680, 'synset': 'manhole.n.01', 'synonyms': ['manhole'], 'def': 'a hole (usually with a flush cover) through which a person can gain access to an underground structure', 'name': 'manhole'}, {'frequency': 'c', 'id': 681, 'synset': 'map.n.01', 'synonyms': ['map'], 'def': "a diagrammatic representation of the earth's surface (or part of it)", 'name': 'map'}, {'frequency': 'c', 'id': 682, 'synset': 'marker.n.03', 'synonyms': ['marker'], 'def': 'a writing implement for making a mark', 'name': 'marker'}, {'frequency': 'r', 'id': 683, 'synset': 'martini.n.01', 'synonyms': ['martini'], 'def': 'a cocktail made of gin (or vodka) with dry vermouth', 'name': 'martini'}, {'frequency': 'r', 'id': 684, 'synset': 'mascot.n.01', 'synonyms': ['mascot'], 'def': 'a person or animal that is adopted by a team or other group as a symbolic figure', 'name': 'mascot'}, {'frequency': 'c', 'id': 685, 'synset': 'mashed_potato.n.01', 'synonyms': ['mashed_potato'], 'def': 'potato that has been peeled and boiled and then mashed', 'name': 'mashed_potato'}, {'frequency': 'r', 'id': 686, 'synset': 'masher.n.02', 'synonyms': ['masher'], 'def': 'a kitchen utensil used for mashing (e.g. 
potatoes)', 'name': 'masher'}, {'frequency': 'f', 'id': 687, 'synset': 'mask.n.04', 'synonyms': ['mask', 'facemask'], 'def': 'a protective covering worn over the face', 'name': 'mask'}, {'frequency': 'f', 'id': 688, 'synset': 'mast.n.01', 'synonyms': ['mast'], 'def': 'a vertical spar for supporting sails', 'name': 'mast'}, {'frequency': 'c', 'id': 689, 'synset': 'mat.n.03', 'synonyms': ['mat_(gym_equipment)', 'gym_mat'], 'def': 'sports equipment consisting of a piece of thick padding on the floor for gymnastics', 'name': 'mat_(gym_equipment)'}, {'frequency': 'r', 'id': 690, 'synset': 'matchbox.n.01', 'synonyms': ['matchbox'], 'def': 'a box for holding matches', 'name': 'matchbox'}, {'frequency': 'f', 'id': 691, 'synset': 'mattress.n.01', 'synonyms': ['mattress'], 'def': 'a thick pad filled with resilient material used as a bed or part of a bed', 'name': 'mattress'}, {'frequency': 'c', 'id': 692, 'synset': 'measuring_cup.n.01', 'synonyms': ['measuring_cup'], 'def': 'graduated cup used to measure liquid or granular ingredients', 'name': 'measuring_cup'}, {'frequency': 'c', 'id': 693, 'synset': 'measuring_stick.n.01', 'synonyms': ['measuring_stick', 'ruler_(measuring_stick)', 'measuring_rod'], 'def': 'measuring instrument having a sequence of marks at regular intervals', 'name': 'measuring_stick'}, {'frequency': 'c', 'id': 694, 'synset': 'meatball.n.01', 'synonyms': ['meatball'], 'def': 'ground meat formed into a ball and fried or simmered in broth', 'name': 'meatball'}, {'frequency': 'c', 'id': 695, 'synset': 'medicine.n.02', 'synonyms': ['medicine'], 'def': 'something that treats or prevents or alleviates the symptoms of disease', 'name': 'medicine'}, {'frequency': 'r', 'id': 696, 'synset': 'melon.n.01', 'synonyms': ['melon'], 'def': 'fruit of the gourd family having a hard rind and sweet juicy flesh', 'name': 'melon'}, {'frequency': 'f', 'id': 697, 'synset': 'microphone.n.01', 'synonyms': ['microphone'], 'def': 'device for converting sound waves into electrical energy', 'name': 'microphone'}, {'frequency': 'r', 'id': 698, 'synset': 'microscope.n.01', 'synonyms': ['microscope'], 'def': 'magnifier of the image of small objects', 'name': 'microscope'}, {'frequency': 'f', 'id': 699, 'synset': 'microwave.n.02', 'synonyms': ['microwave_oven'], 'def': 'kitchen appliance that cooks food by passing an electromagnetic wave through it', 'name': 'microwave_oven'}, {'frequency': 'r', 'id': 700, 'synset': 'milestone.n.01', 'synonyms': ['milestone', 'milepost'], 'def': 'stone post at side of a road to show distances', 'name': 'milestone'}, {'frequency': 'c', 'id': 701, 'synset': 'milk.n.01', 'synonyms': ['milk'], 'def': 'a white nutritious liquid secreted by mammals and used as food by human beings', 'name': 'milk'}, {'frequency': 'f', 'id': 702, 'synset': 'minivan.n.01', 'synonyms': ['minivan'], 'def': 'a small box-shaped passenger van', 'name': 'minivan'}, {'frequency': 'r', 'id': 703, 'synset': 'mint.n.05', 'synonyms': ['mint_candy'], 'def': 'a candy that is flavored with a mint oil', 'name': 'mint_candy'}, {'frequency': 'f', 'id': 704, 'synset': 'mirror.n.01', 'synonyms': ['mirror'], 'def': 'polished surface that forms images by reflecting light', 'name': 'mirror'}, {'frequency': 'c', 'id': 705, 'synset': 'mitten.n.01', 'synonyms': ['mitten'], 'def': 'glove that encases the thumb separately and the other four fingers together', 'name': 'mitten'}, {'frequency': 'c', 'id': 706, 'synset': 'mixer.n.04', 'synonyms': ['mixer_(kitchen_tool)', 'stand_mixer'], 'def': 'a kitchen utensil that is used for mixing 
foods', 'name': 'mixer_(kitchen_tool)'}, {'frequency': 'c', 'id': 707, 'synset': 'money.n.03', 'synonyms': ['money'], 'def': 'the official currency issued by a government or national bank', 'name': 'money'}, {'frequency': 'f', 'id': 708, 'synset': 'monitor.n.04', 'synonyms': ['monitor_(computer_equipment) computer_monitor'], 'def': 'a computer monitor', 'name': 'monitor_(computer_equipment) computer_monitor'}, {'frequency': 'c', 'id': 709, 'synset': 'monkey.n.01', 'synonyms': ['monkey'], 'def': 'any of various long-tailed primates', 'name': 'monkey'}, {'frequency': 'f', 'id': 710, 'synset': 'motor.n.01', 'synonyms': ['motor'], 'def': 'machine that converts other forms of energy into mechanical energy and so imparts motion', 'name': 'motor'}, {'frequency': 'f', 'id': 711, 'synset': 'motor_scooter.n.01', 'synonyms': ['motor_scooter', 'scooter'], 'def': 'a wheeled vehicle with small wheels and a low-powered engine', 'name': 'motor_scooter'}, {'frequency': 'r', 'id': 712, 'synset': 'motor_vehicle.n.01', 'synonyms': ['motor_vehicle', 'automotive_vehicle'], 'def': 'a self-propelled wheeled vehicle that does not run on rails', 'name': 'motor_vehicle'}, {'frequency': 'r', 'id': 713, 'synset': 'motorboat.n.01', 'synonyms': ['motorboat', 'powerboat'], 'def': 'a boat propelled by an internal-combustion engine', 'name': 'motorboat'}, {'frequency': 'f', 'id': 714, 'synset': 'motorcycle.n.01', 'synonyms': ['motorcycle'], 'def': 'a motor vehicle with two wheels and a strong frame', 'name': 'motorcycle'}, {'frequency': 'f', 'id': 715, 'synset': 'mound.n.01', 'synonyms': ['mound_(baseball)', "pitcher's_mound"], 'def': '(baseball) the slight elevation on which the pitcher stands', 'name': 'mound_(baseball)'}, {'frequency': 'r', 'id': 716, 'synset': 'mouse.n.01', 'synonyms': ['mouse_(animal_rodent)'], 'def': 'a small rodent with pointed snouts and small ears on elongated bodies with slender usually hairless tails', 'name': 'mouse_(animal_rodent)'}, {'frequency': 'f', 'id': 717, 'synset': 'mouse.n.04', 'synonyms': ['mouse_(computer_equipment)', 'computer_mouse'], 'def': 'a computer input device that controls an on-screen pointer', 'name': 'mouse_(computer_equipment)'}, {'frequency': 'f', 'id': 718, 'synset': 'mousepad.n.01', 'synonyms': ['mousepad'], 'def': 'a small portable pad that provides an operating surface for a computer mouse', 'name': 'mousepad'}, {'frequency': 'c', 'id': 719, 'synset': 'muffin.n.01', 'synonyms': ['muffin'], 'def': 'a sweet quick bread baked in a cup-shaped pan', 'name': 'muffin'}, {'frequency': 'f', 'id': 720, 'synset': 'mug.n.04', 'synonyms': ['mug'], 'def': 'a drinking vessel with a handle, usually cylindrical', 'name': 'mug'}, {'frequency': 'f', 'id': 721, 'synset': 'mushroom.n.02', 'synonyms': ['mushroom'], 'def': 'a common mushroom', 'name': 'mushroom'}, {'frequency': 'r', 'id': 722, 'synset': 'music_stool.n.01', 'synonyms': ['music_stool', 'piano_stool'], 'def': 'a stool for piano players; usually adjustable in height', 'name': 'music_stool'}, {'frequency': 'r', 'id': 723, 'synset': 'musical_instrument.n.01', 'synonyms': ['musical_instrument', 'instrument_(musical)'], 'def': 'any of various devices or contrivances that can be used to produce musical tones or sounds', 'name': 'musical_instrument'}, {'frequency': 'r', 'id': 724, 'synset': 'nailfile.n.01', 'synonyms': ['nailfile'], 'def': 'a small flat file for shaping the nails', 'name': 'nailfile'}, {'frequency': 'r', 'id': 725, 'synset': 'nameplate.n.01', 'synonyms': ['nameplate'], 'def': 'a plate bearing a name', 'name': 'nameplate'}, 
{'frequency': 'f', 'id': 726, 'synset': 'napkin.n.01', 'synonyms': ['napkin', 'table_napkin', 'serviette'], 'def': 'a small piece of table linen or paper that is used to wipe the mouth and to cover the lap in order to protect clothing', 'name': 'napkin'}, {'frequency': 'r', 'id': 727, 'synset': 'neckerchief.n.01', 'synonyms': ['neckerchief'], 'def': 'a kerchief worn around the neck', 'name': 'neckerchief'}, {'frequency': 'f', 'id': 728, 'synset': 'necklace.n.01', 'synonyms': ['necklace'], 'def': 'jewelry consisting of a cord or chain (often bearing gems) worn about the neck as an ornament', 'name': 'necklace'}, {'frequency': 'f', 'id': 729, 'synset': 'necktie.n.01', 'synonyms': ['necktie', 'tie_(necktie)'], 'def': 'neckwear consisting of a long narrow piece of material worn under a collar and tied in a knot at the front', 'name': 'necktie'}, {'frequency': 'r', 'id': 730, 'synset': 'needle.n.03', 'synonyms': ['needle'], 'def': 'a sharp pointed implement (usually metal)', 'name': 'needle'}, {'frequency': 'c', 'id': 731, 'synset': 'nest.n.01', 'synonyms': ['nest'], 'def': 'a structure in which animals lay eggs or give birth to their young', 'name': 'nest'}, {'frequency': 'r', 'id': 732, 'synset': 'newsstand.n.01', 'synonyms': ['newsstand'], 'def': 'a stall where newspapers and other periodicals are sold', 'name': 'newsstand'}, {'frequency': 'c', 'id': 733, 'synset': 'nightwear.n.01', 'synonyms': ['nightshirt', 'nightwear', 'sleepwear', 'nightclothes'], 'def': 'garments designed to be worn in bed', 'name': 'nightshirt'}, {'frequency': 'r', 'id': 734, 'synset': 'nosebag.n.01', 'synonyms': ['nosebag_(for_animals)', 'feedbag'], 'def': 'a canvas bag that is used to feed an animal (such as a horse); covers the muzzle and fastens at the top of the head', 'name': 'nosebag_(for_animals)'}, {'frequency': 'r', 'id': 735, 'synset': 'noseband.n.01', 'synonyms': ['noseband_(for_animals)', 'nosepiece_(for_animals)'], 'def': "a strap that is the part of a bridle that goes over the animal's nose", 'name': 'noseband_(for_animals)'}, {'frequency': 'f', 'id': 736, 'synset': 'notebook.n.01', 'synonyms': ['notebook'], 'def': 'a book with blank pages for recording notes or memoranda', 'name': 'notebook'}, {'frequency': 'c', 'id': 737, 'synset': 'notepad.n.01', 'synonyms': ['notepad'], 'def': 'a pad of paper for keeping notes', 'name': 'notepad'}, {'frequency': 'c', 'id': 738, 'synset': 'nut.n.03', 'synonyms': ['nut'], 'def': 'a small metal block (usually square or hexagonal) with internal screw thread to be fitted onto a bolt', 'name': 'nut'}, {'frequency': 'r', 'id': 739, 'synset': 'nutcracker.n.01', 'synonyms': ['nutcracker'], 'def': 'a hand tool used to crack nuts open', 'name': 'nutcracker'}, {'frequency': 'c', 'id': 740, 'synset': 'oar.n.01', 'synonyms': ['oar'], 'def': 'an implement used to propel or steer a boat', 'name': 'oar'}, {'frequency': 'r', 'id': 741, 'synset': 'octopus.n.01', 'synonyms': ['octopus_(food)'], 'def': 'tentacles of octopus prepared as food', 'name': 'octopus_(food)'}, {'frequency': 'r', 'id': 742, 'synset': 'octopus.n.02', 'synonyms': ['octopus_(animal)'], 'def': 'bottom-living cephalopod having a soft oval body with eight long tentacles', 'name': 'octopus_(animal)'}, {'frequency': 'c', 'id': 743, 'synset': 'oil_lamp.n.01', 'synonyms': ['oil_lamp', 'kerosene_lamp', 'kerosine_lamp'], 'def': 'a lamp that burns oil (as kerosine) for light', 'name': 'oil_lamp'}, {'frequency': 'c', 'id': 744, 'synset': 'olive_oil.n.01', 'synonyms': ['olive_oil'], 'def': 'oil from olives', 'name': 'olive_oil'}, 
{'frequency': 'r', 'id': 745, 'synset': 'omelet.n.01', 'synonyms': ['omelet', 'omelette'], 'def': 'beaten eggs cooked until just set; may be folded around e.g. ham or cheese or jelly', 'name': 'omelet'}, {'frequency': 'f', 'id': 746, 'synset': 'onion.n.01', 'synonyms': ['onion'], 'def': 'the bulb of an onion plant', 'name': 'onion'}, {'frequency': 'f', 'id': 747, 'synset': 'orange.n.01', 'synonyms': ['orange_(fruit)'], 'def': 'orange (FRUIT of an orange tree)', 'name': 'orange_(fruit)'}, {'frequency': 'c', 'id': 748, 'synset': 'orange_juice.n.01', 'synonyms': ['orange_juice'], 'def': 'bottled or freshly squeezed juice of oranges', 'name': 'orange_juice'}, {'frequency': 'r', 'id': 749, 'synset': 'oregano.n.01', 'synonyms': ['oregano', 'marjoram'], 'def': 'aromatic Eurasian perennial herb used in cooking and baking', 'name': 'oregano'}, {'frequency': 'c', 'id': 750, 'synset': 'ostrich.n.02', 'synonyms': ['ostrich'], 'def': 'fast-running African flightless bird with two-toed feet; largest living bird', 'name': 'ostrich'}, {'frequency': 'c', 'id': 751, 'synset': 'ottoman.n.03', 'synonyms': ['ottoman', 'pouf', 'pouffe', 'hassock'], 'def': 'thick cushion used as a seat', 'name': 'ottoman'}, {'frequency': 'c', 'id': 752, 'synset': 'overall.n.01', 'synonyms': ['overalls_(clothing)'], 'def': 'work clothing consisting of denim trousers usually with a bib and shoulder straps', 'name': 'overalls_(clothing)'}, {'frequency': 'c', 'id': 753, 'synset': 'owl.n.01', 'synonyms': ['owl'], 'def': 'nocturnal bird of prey with hawk-like beak and claws and large head with front-facing eyes', 'name': 'owl'}, {'frequency': 'c', 'id': 754, 'synset': 'packet.n.03', 'synonyms': ['packet'], 'def': 'a small package or bundle', 'name': 'packet'}, {'frequency': 'r', 'id': 755, 'synset': 'pad.n.03', 'synonyms': ['inkpad', 'inking_pad', 'stamp_pad'], 'def': 'absorbent material saturated with ink used to transfer ink evenly to a rubber stamp', 'name': 'inkpad'}, {'frequency': 'c', 'id': 756, 'synset': 'pad.n.04', 'synonyms': ['pad'], 'def': 'a flat mass of soft material used for protection, stuffing, or comfort', 'name': 'pad'}, {'frequency': 'c', 'id': 757, 'synset': 'paddle.n.04', 'synonyms': ['paddle', 'boat_paddle'], 'def': 'a short light oar used without an oarlock to propel a canoe or small boat', 'name': 'paddle'}, {'frequency': 'c', 'id': 758, 'synset': 'padlock.n.01', 'synonyms': ['padlock'], 'def': 'a detachable, portable lock', 'name': 'padlock'}, {'frequency': 'r', 'id': 759, 'synset': 'paintbox.n.01', 'synonyms': ['paintbox'], 'def': "a box containing a collection of cubes or tubes of artists' paint", 'name': 'paintbox'}, {'frequency': 'c', 'id': 760, 'synset': 'paintbrush.n.01', 'synonyms': ['paintbrush'], 'def': 'a brush used as an applicator to apply paint', 'name': 'paintbrush'}, {'frequency': 'f', 'id': 761, 'synset': 'painting.n.01', 'synonyms': ['painting'], 'def': 'graphic art consisting of an artistic composition made by applying paints to a surface', 'name': 'painting'}, {'frequency': 'c', 'id': 762, 'synset': 'pajama.n.02', 'synonyms': ['pajamas', 'pyjamas'], 'def': 'loose-fitting nightclothes worn for sleeping or lounging', 'name': 'pajamas'}, {'frequency': 'c', 'id': 763, 'synset': 'palette.n.02', 'synonyms': ['palette', 'pallet'], 'def': 'board that provides a flat surface on which artists mix paints and the range of colors used', 'name': 'palette'}, {'frequency': 'f', 'id': 764, 'synset': 'pan.n.01', 'synonyms': ['pan_(for_cooking)', 'cooking_pan'], 'def': 'cooking utensil consisting of a wide 
metal vessel', 'name': 'pan_(for_cooking)'}, {'frequency': 'r', 'id': 765, 'synset': 'pan.n.03', 'synonyms': ['pan_(metal_container)'], 'def': 'shallow container made of metal', 'name': 'pan_(metal_container)'}, {'frequency': 'c', 'id': 766, 'synset': 'pancake.n.01', 'synonyms': ['pancake'], 'def': 'a flat cake of thin batter fried on both sides on a griddle', 'name': 'pancake'}, {'frequency': 'r', 'id': 767, 'synset': 'pantyhose.n.01', 'synonyms': ['pantyhose'], 'def': "a woman's tights consisting of underpants and stockings", 'name': 'pantyhose'}, {'frequency': 'r', 'id': 768, 'synset': 'papaya.n.02', 'synonyms': ['papaya'], 'def': 'large oval melon-like tropical fruit with yellowish flesh', 'name': 'papaya'}, {'frequency': 'r', 'id': 769, 'synset': 'paper_clip.n.01', 'synonyms': ['paperclip'], 'def': 'a wire or plastic clip for holding sheets of paper together', 'name': 'paperclip'}, {'frequency': 'f', 'id': 770, 'synset': 'paper_plate.n.01', 'synonyms': ['paper_plate'], 'def': 'a disposable plate made of cardboard', 'name': 'paper_plate'}, {'frequency': 'f', 'id': 771, 'synset': 'paper_towel.n.01', 'synonyms': ['paper_towel'], 'def': 'a disposable towel made of absorbent paper', 'name': 'paper_towel'}, {'frequency': 'r', 'id': 772, 'synset': 'paperback_book.n.01', 'synonyms': ['paperback_book', 'paper-back_book', 'softback_book', 'soft-cover_book'], 'def': 'a book with paper covers', 'name': 'paperback_book'}, {'frequency': 'r', 'id': 773, 'synset': 'paperweight.n.01', 'synonyms': ['paperweight'], 'def': 'a weight used to hold down a stack of papers', 'name': 'paperweight'}, {'frequency': 'c', 'id': 774, 'synset': 'parachute.n.01', 'synonyms': ['parachute'], 'def': 'rescue equipment consisting of a device that fills with air and retards your fall', 'name': 'parachute'}, {'frequency': 'r', 'id': 775, 'synset': 'parakeet.n.01', 'synonyms': ['parakeet', 'parrakeet', 'parroket', 'paraquet', 'paroquet', 'parroquet'], 'def': 'any of numerous small slender long-tailed parrots', 'name': 'parakeet'}, {'frequency': 'c', 'id': 776, 'synset': 'parasail.n.01', 'synonyms': ['parasail_(sports)'], 'def': 'parachute that will lift a person up into the air when it is towed by a motorboat or a car', 'name': 'parasail_(sports)'}, {'frequency': 'r', 'id': 777, 'synset': 'parchment.n.01', 'synonyms': ['parchment'], 'def': 'a superior paper resembling sheepskin', 'name': 'parchment'}, {'frequency': 'r', 'id': 778, 'synset': 'parka.n.01', 'synonyms': ['parka', 'anorak'], 'def': "a kind of heavy jacket (`windcheater' is a British term)", 'name': 'parka'}, {'frequency': 'f', 'id': 779, 'synset': 'parking_meter.n.01', 'synonyms': ['parking_meter'], 'def': 'a coin-operated timer located next to a parking space', 'name': 'parking_meter'}, {'frequency': 'c', 'id': 780, 'synset': 'parrot.n.01', 'synonyms': ['parrot'], 'def': 'usually brightly colored tropical birds with short hooked beaks and the ability to mimic sounds', 'name': 'parrot'}, {'frequency': 'c', 'id': 781, 'synset': 'passenger_car.n.01', 'synonyms': ['passenger_car_(part_of_a_train)', 'coach_(part_of_a_train)'], 'def': 'a railcar where passengers ride', 'name': 'passenger_car_(part_of_a_train)'}, {'frequency': 'r', 'id': 782, 'synset': 'passenger_ship.n.01', 'synonyms': ['passenger_ship'], 'def': 'a ship built to carry passengers', 'name': 'passenger_ship'}, {'frequency': 'r', 'id': 783, 'synset': 'passport.n.02', 'synonyms': ['passport'], 'def': 'a document issued by a country to a citizen allowing that person to travel abroad and re-enter the home 
country', 'name': 'passport'}, {'frequency': 'f', 'id': 784, 'synset': 'pastry.n.02', 'synonyms': ['pastry'], 'def': 'any of various baked foods made of dough or batter', 'name': 'pastry'}, {'frequency': 'r', 'id': 785, 'synset': 'patty.n.01', 'synonyms': ['patty_(food)'], 'def': 'small flat mass of chopped food', 'name': 'patty_(food)'}, {'frequency': 'c', 'id': 786, 'synset': 'pea.n.01', 'synonyms': ['pea_(food)'], 'def': 'seed of a pea plant used for food', 'name': 'pea_(food)'}, {'frequency': 'c', 'id': 787, 'synset': 'peach.n.03', 'synonyms': ['peach'], 'def': 'downy juicy fruit with sweet yellowish or whitish flesh', 'name': 'peach'}, {'frequency': 'c', 'id': 788, 'synset': 'peanut_butter.n.01', 'synonyms': ['peanut_butter'], 'def': 'a spread made from ground peanuts', 'name': 'peanut_butter'}, {'frequency': 'c', 'id': 789, 'synset': 'pear.n.01', 'synonyms': ['pear'], 'def': 'sweet juicy gritty-textured fruit available in many varieties', 'name': 'pear'}, {'frequency': 'r', 'id': 790, 'synset': 'peeler.n.03', 'synonyms': ['peeler_(tool_for_fruit_and_vegetables)'], 'def': 'a device for peeling vegetables or fruits', 'name': 'peeler_(tool_for_fruit_and_vegetables)'}, {'frequency': 'r', 'id': 791, 'synset': 'pegboard.n.01', 'synonyms': ['pegboard'], 'def': 'a board perforated with regularly spaced holes into which pegs can be fitted', 'name': 'pegboard'}, {'frequency': 'c', 'id': 792, 'synset': 'pelican.n.01', 'synonyms': ['pelican'], 'def': 'large long-winged warm-water seabird having a large bill with a distensible pouch for fish', 'name': 'pelican'}, {'frequency': 'f', 'id': 793, 'synset': 'pen.n.01', 'synonyms': ['pen'], 'def': 'a writing implement with a point from which ink flows', 'name': 'pen'}, {'frequency': 'c', 'id': 794, 'synset': 'pencil.n.01', 'synonyms': ['pencil'], 'def': 'a thin cylindrical pointed writing implement made of wood and graphite', 'name': 'pencil'}, {'frequency': 'r', 'id': 795, 'synset': 'pencil_box.n.01', 'synonyms': ['pencil_box', 'pencil_case'], 'def': 'a box for holding pencils', 'name': 'pencil_box'}, {'frequency': 'r', 'id': 796, 'synset': 'pencil_sharpener.n.01', 'synonyms': ['pencil_sharpener'], 'def': 'a rotary implement for sharpening the point on pencils', 'name': 'pencil_sharpener'}, {'frequency': 'r', 'id': 797, 'synset': 'pendulum.n.01', 'synonyms': ['pendulum'], 'def': 'an apparatus consisting of an object mounted so that it swings freely under the influence of gravity', 'name': 'pendulum'}, {'frequency': 'c', 'id': 798, 'synset': 'penguin.n.01', 'synonyms': ['penguin'], 'def': 'short-legged flightless birds of cold southern regions having webbed feet and wings modified as flippers', 'name': 'penguin'}, {'frequency': 'r', 'id': 799, 'synset': 'pennant.n.02', 'synonyms': ['pennant'], 'def': 'a flag longer than it is wide (and often tapering)', 'name': 'pennant'}, {'frequency': 'r', 'id': 800, 'synset': 'penny.n.02', 'synonyms': ['penny_(coin)'], 'def': 'a coin worth one-hundredth of the value of the basic unit', 'name': 'penny_(coin)'}, {'frequency': 'c', 'id': 801, 'synset': 'pepper.n.03', 'synonyms': ['pepper', 'peppercorn'], 'def': 'pungent seasoning from the berry of the common pepper plant; whole or ground', 'name': 'pepper'}, {'frequency': 'c', 'id': 802, 'synset': 'pepper_mill.n.01', 'synonyms': ['pepper_mill', 'pepper_grinder'], 'def': 'a mill for grinding pepper', 'name': 'pepper_mill'}, {'frequency': 'c', 'id': 803, 'synset': 'perfume.n.02', 'synonyms': ['perfume'], 'def': 'a toiletry that emits and diffuses a fragrant odor', 
'name': 'perfume'}, {'frequency': 'r', 'id': 804, 'synset': 'persimmon.n.02', 'synonyms': ['persimmon'], 'def': 'orange fruit resembling a plum; edible when fully ripe', 'name': 'persimmon'}, {'frequency': 'f', 'id': 805, 'synset': 'person.n.01', 'synonyms': ['baby', 'child', 'boy', 'girl', 'man', 'woman', 'person', 'human'], 'def': 'a human being', 'name': 'baby'}, {'frequency': 'r', 'id': 806, 'synset': 'pet.n.01', 'synonyms': ['pet'], 'def': 'a domesticated animal kept for companionship or amusement', 'name': 'pet'}, {'frequency': 'r', 'id': 807, 'synset': 'petfood.n.01', 'synonyms': ['petfood', 'pet-food'], 'def': 'food prepared for animal pets', 'name': 'petfood'}, {'frequency': 'r', 'id': 808, 'synset': 'pew.n.01', 'synonyms': ['pew_(church_bench)', 'church_bench'], 'def': 'long bench with backs; used in church by the congregation', 'name': 'pew_(church_bench)'}, {'frequency': 'r', 'id': 809, 'synset': 'phonebook.n.01', 'synonyms': ['phonebook', 'telephone_book', 'telephone_directory'], 'def': 'a directory containing an alphabetical list of telephone subscribers and their telephone numbers', 'name': 'phonebook'}, {'frequency': 'c', 'id': 810, 'synset': 'phonograph_record.n.01', 'synonyms': ['phonograph_record', 'phonograph_recording', 'record_(phonograph_recording)'], 'def': 'sound recording consisting of a typically black disk with a continuous groove', 'name': 'phonograph_record'}, {'frequency': 'c', 'id': 811, 'synset': 'piano.n.01', 'synonyms': ['piano'], 'def': 'a keyboard instrument that is played by depressing keys that cause hammers to strike tuned strings and produce sounds', 'name': 'piano'}, {'frequency': 'f', 'id': 812, 'synset': 'pickle.n.01', 'synonyms': ['pickle'], 'def': 'vegetables (especially cucumbers) preserved in brine or vinegar', 'name': 'pickle'}, {'frequency': 'f', 'id': 813, 'synset': 'pickup.n.01', 'synonyms': ['pickup_truck'], 'def': 'a light truck with an open body and low sides and a tailboard', 'name': 'pickup_truck'}, {'frequency': 'c', 'id': 814, 'synset': 'pie.n.01', 'synonyms': ['pie'], 'def': 'dish baked in pastry-lined pan often with a pastry top', 'name': 'pie'}, {'frequency': 'c', 'id': 815, 'synset': 'pigeon.n.01', 'synonyms': ['pigeon'], 'def': 'wild and domesticated birds having a heavy body and short legs', 'name': 'pigeon'}, {'frequency': 'r', 'id': 816, 'synset': 'piggy_bank.n.01', 'synonyms': ['piggy_bank', 'penny_bank'], 'def': "a child's coin bank (often shaped like a pig)", 'name': 'piggy_bank'}, {'frequency': 'f', 'id': 817, 'synset': 'pillow.n.01', 'synonyms': ['pillow'], 'def': 'a cushion to support the head of a sleeping person', 'name': 'pillow'}, {'frequency': 'r', 'id': 818, 'synset': 'pin.n.09', 'synonyms': ['pin_(non_jewelry)'], 'def': 'a small slender (often pointed) piece of wood or metal used to support or fasten or attach things', 'name': 'pin_(non_jewelry)'}, {'frequency': 'f', 'id': 819, 'synset': 'pineapple.n.02', 'synonyms': ['pineapple'], 'def': 'large sweet fleshy tropical fruit with a tuft of stiff leaves', 'name': 'pineapple'}, {'frequency': 'c', 'id': 820, 'synset': 'pinecone.n.01', 'synonyms': ['pinecone'], 'def': 'the seed-producing cone of a pine tree', 'name': 'pinecone'}, {'frequency': 'r', 'id': 821, 'synset': 'ping-pong_ball.n.01', 'synonyms': ['ping-pong_ball'], 'def': 'light hollow ball used in playing table tennis', 'name': 'ping-pong_ball'}, {'frequency': 'r', 'id': 822, 'synset': 'pinwheel.n.03', 'synonyms': ['pinwheel'], 'def': 'a toy consisting of vanes of colored paper or plastic that is pinned to a 
stick and spins when it is pointed into the wind', 'name': 'pinwheel'}, {'frequency': 'r', 'id': 823, 'synset': 'pipe.n.01', 'synonyms': ['tobacco_pipe'], 'def': 'a tube with a small bowl at one end; used for smoking tobacco', 'name': 'tobacco_pipe'}, {'frequency': 'f', 'id': 824, 'synset': 'pipe.n.02', 'synonyms': ['pipe', 'piping'], 'def': 'a long tube made of metal or plastic that is used to carry water or oil or gas etc.', 'name': 'pipe'}, {'frequency': 'r', 'id': 825, 'synset': 'pistol.n.01', 'synonyms': ['pistol', 'handgun'], 'def': 'a firearm that is held and fired with one hand', 'name': 'pistol'}, {'frequency': 'r', 'id': 826, 'synset': 'pita.n.01', 'synonyms': ['pita_(bread)', 'pocket_bread'], 'def': 'usually small round bread that can open into a pocket for filling', 'name': 'pita_(bread)'}, {'frequency': 'f', 'id': 827, 'synset': 'pitcher.n.02', 'synonyms': ['pitcher_(vessel_for_liquid)', 'ewer'], 'def': 'an open vessel with a handle and a spout for pouring', 'name': 'pitcher_(vessel_for_liquid)'}, {'frequency': 'r', 'id': 828, 'synset': 'pitchfork.n.01', 'synonyms': ['pitchfork'], 'def': 'a long-handled hand tool with sharp widely spaced prongs for lifting and pitching hay', 'name': 'pitchfork'}, {'frequency': 'f', 'id': 829, 'synset': 'pizza.n.01', 'synonyms': ['pizza'], 'def': 'Italian open pie made of thin bread dough spread with a spiced mixture of e.g. tomato sauce and cheese', 'name': 'pizza'}, {'frequency': 'f', 'id': 830, 'synset': 'place_mat.n.01', 'synonyms': ['place_mat'], 'def': 'a mat placed on a table for an individual place setting', 'name': 'place_mat'}, {'frequency': 'f', 'id': 831, 'synset': 'plate.n.04', 'synonyms': ['plate'], 'def': 'dish on which food is served or from which food is eaten', 'name': 'plate'}, {'frequency': 'c', 'id': 832, 'synset': 'platter.n.01', 'synonyms': ['platter'], 'def': 'a large shallow dish used for serving food', 'name': 'platter'}, {'frequency': 'r', 'id': 833, 'synset': 'playing_card.n.01', 'synonyms': ['playing_card'], 'def': 'one of a pack of cards that are used to play card games', 'name': 'playing_card'}, {'frequency': 'r', 'id': 834, 'synset': 'playpen.n.01', 'synonyms': ['playpen'], 'def': 'a portable enclosure in which babies may be left to play', 'name': 'playpen'}, {'frequency': 'c', 'id': 835, 'synset': 'pliers.n.01', 'synonyms': ['pliers', 'plyers'], 'def': 'a gripping hand tool with two hinged arms and (usually) serrated jaws', 'name': 'pliers'}, {'frequency': 'r', 'id': 836, 'synset': 'plow.n.01', 'synonyms': ['plow_(farm_equipment)', 'plough_(farm_equipment)'], 'def': 'a farm tool having one or more heavy blades to break the soil and cut a furrow prior to sowing', 'name': 'plow_(farm_equipment)'}, {'frequency': 'r', 'id': 837, 'synset': 'pocket_watch.n.01', 'synonyms': ['pocket_watch'], 'def': 'a watch that is carried in a small watch pocket', 'name': 'pocket_watch'}, {'frequency': 'c', 'id': 838, 'synset': 'pocketknife.n.01', 'synonyms': ['pocketknife'], 'def': 'a knife with a blade that folds into the handle; suitable for carrying in the pocket', 'name': 'pocketknife'}, {'frequency': 'c', 'id': 839, 'synset': 'poker.n.01', 'synonyms': ['poker_(fire_stirring_tool)', 'stove_poker', 'fire_hook'], 'def': 'fire iron consisting of a metal rod with a handle; used to stir a fire', 'name': 'poker_(fire_stirring_tool)'}, {'frequency': 'f', 'id': 840, 'synset': 'pole.n.01', 'synonyms': ['pole', 'post'], 'def': 'a long (usually round) rod of wood or metal or plastic', 'name': 'pole'}, {'frequency': 'r', 'id': 841, 'synset': 
'police_van.n.01', 'synonyms': ['police_van', 'police_wagon', 'paddy_wagon', 'patrol_wagon'], 'def': 'van used by police to transport prisoners', 'name': 'police_van'}, {'frequency': 'f', 'id': 842, 'synset': 'polo_shirt.n.01', 'synonyms': ['polo_shirt', 'sport_shirt'], 'def': 'a shirt with short sleeves designed for comfort and casual wear', 'name': 'polo_shirt'}, {'frequency': 'r', 'id': 843, 'synset': 'poncho.n.01', 'synonyms': ['poncho'], 'def': 'a blanket-like cloak with a hole in the center for the head', 'name': 'poncho'}, {'frequency': 'c', 'id': 844, 'synset': 'pony.n.05', 'synonyms': ['pony'], 'def': 'any of various breeds of small gentle horses usually less than five feet high at the shoulder', 'name': 'pony'}, {'frequency': 'r', 'id': 845, 'synset': 'pool_table.n.01', 'synonyms': ['pool_table', 'billiard_table', 'snooker_table'], 'def': 'game equipment consisting of a heavy table on which pool is played', 'name': 'pool_table'}, {'frequency': 'f', 'id': 846, 'synset': 'pop.n.02', 'synonyms': ['pop_(soda)', 'soda_(pop)', 'tonic', 'soft_drink'], 'def': 'a sweet drink containing carbonated water and flavoring', 'name': 'pop_(soda)'}, {'frequency': 'r', 'id': 847, 'synset': 'portrait.n.02', 'synonyms': ['portrait', 'portrayal'], 'def': 'any likeness of a person, in any medium', 'name': 'portrait'}, {'frequency': 'c', 'id': 848, 'synset': 'postbox.n.01', 'synonyms': ['postbox_(public)', 'mailbox_(public)'], 'def': 'public box for deposit of mail', 'name': 'postbox_(public)'}, {'frequency': 'c', 'id': 849, 'synset': 'postcard.n.01', 'synonyms': ['postcard', 'postal_card', 'mailing-card'], 'def': 'a card for sending messages by post without an envelope', 'name': 'postcard'}, {'frequency': 'f', 'id': 850, 'synset': 'poster.n.01', 'synonyms': ['poster', 'placard'], 'def': 'a sign posted in a public place as an advertisement', 'name': 'poster'}, {'frequency': 'f', 'id': 851, 'synset': 'pot.n.01', 'synonyms': ['pot'], 'def': 'metal or earthenware cooking vessel that is usually round and deep; often has a handle and lid', 'name': 'pot'}, {'frequency': 'f', 'id': 852, 'synset': 'pot.n.04', 'synonyms': ['flowerpot'], 'def': 'a container in which plants are cultivated', 'name': 'flowerpot'}, {'frequency': 'f', 'id': 853, 'synset': 'potato.n.01', 'synonyms': ['potato'], 'def': 'an edible tuber native to South America', 'name': 'potato'}, {'frequency': 'c', 'id': 854, 'synset': 'potholder.n.01', 'synonyms': ['potholder'], 'def': 'an insulated pad for holding hot pots', 'name': 'potholder'}, {'frequency': 'c', 'id': 855, 'synset': 'pottery.n.01', 'synonyms': ['pottery', 'clayware'], 'def': 'ceramic ware made from clay and baked in a kiln', 'name': 'pottery'}, {'frequency': 'c', 'id': 856, 'synset': 'pouch.n.01', 'synonyms': ['pouch'], 'def': 'a small or medium size container for holding or carrying things', 'name': 'pouch'}, {'frequency': 'r', 'id': 857, 'synset': 'power_shovel.n.01', 'synonyms': ['power_shovel', 'excavator', 'digger'], 'def': 'a machine for excavating', 'name': 'power_shovel'}, {'frequency': 'c', 'id': 858, 'synset': 'prawn.n.01', 'synonyms': ['prawn', 'shrimp'], 'def': 'any of various edible decapod crustaceans', 'name': 'prawn'}, {'frequency': 'f', 'id': 859, 'synset': 'printer.n.03', 'synonyms': ['printer', 'printing_machine'], 'def': 'a machine that prints', 'name': 'printer'}, {'frequency': 'c', 'id': 860, 'synset': 'projectile.n.01', 'synonyms': ['projectile_(weapon)', 'missile'], 'def': 'a weapon that is forcibly thrown or projected at a target', 'name': 
'projectile_(weapon)'}, {'frequency': 'c', 'id': 861, 'synset': 'projector.n.02', 'synonyms': ['projector'], 'def': 'an optical instrument that projects an enlarged image onto a screen', 'name': 'projector'}, {'frequency': 'f', 'id': 862, 'synset': 'propeller.n.01', 'synonyms': ['propeller', 'propellor'], 'def': 'a mechanical device that rotates to push against air or water', 'name': 'propeller'}, {'frequency': 'r', 'id': 863, 'synset': 'prune.n.01', 'synonyms': ['prune'], 'def': 'dried plum', 'name': 'prune'}, {'frequency': 'r', 'id': 864, 'synset': 'pudding.n.01', 'synonyms': ['pudding'], 'def': 'any of various soft thick unsweetened baked dishes', 'name': 'pudding'}, {'frequency': 'r', 'id': 865, 'synset': 'puffer.n.02', 'synonyms': ['puffer_(fish)', 'pufferfish', 'blowfish', 'globefish'], 'def': 'fishes whose elongated spiny body can inflate itself with water or air to form a globe', 'name': 'puffer_(fish)'}, {'frequency': 'r', 'id': 866, 'synset': 'puffin.n.01', 'synonyms': ['puffin'], 'def': 'seabirds having short necks and brightly colored compressed bills', 'name': 'puffin'}, {'frequency': 'r', 'id': 867, 'synset': 'pug.n.01', 'synonyms': ['pug-dog'], 'def': 'small compact smooth-coated breed of Asiatic origin having a tightly curled tail and broad flat wrinkled muzzle', 'name': 'pug-dog'}, {'frequency': 'c', 'id': 868, 'synset': 'pumpkin.n.02', 'synonyms': ['pumpkin'], 'def': 'usually large pulpy deep-yellow round fruit of the squash family maturing in late summer or early autumn', 'name': 'pumpkin'}, {'frequency': 'r', 'id': 869, 'synset': 'punch.n.03', 'synonyms': ['puncher'], 'def': 'a tool for making holes or indentations', 'name': 'puncher'}, {'frequency': 'r', 'id': 870, 'synset': 'puppet.n.01', 'synonyms': ['puppet', 'marionette'], 'def': 'a small figure of a person operated from above with strings by a puppeteer', 'name': 'puppet'}, {'frequency': 'r', 'id': 871, 'synset': 'puppy.n.01', 'synonyms': ['puppy'], 'def': 'a young dog', 'name': 'puppy'}, {'frequency': 'r', 'id': 872, 'synset': 'quesadilla.n.01', 'synonyms': ['quesadilla'], 'def': 'a tortilla that is filled with cheese and heated', 'name': 'quesadilla'}, {'frequency': 'r', 'id': 873, 'synset': 'quiche.n.02', 'synonyms': ['quiche'], 'def': 'a tart filled with rich unsweetened custard; often contains other ingredients (as cheese or ham or seafood or vegetables)', 'name': 'quiche'}, {'frequency': 'f', 'id': 874, 'synset': 'quilt.n.01', 'synonyms': ['quilt', 'comforter'], 'def': 'bedding made of two layers of cloth filled with stuffing and stitched together', 'name': 'quilt'}, {'frequency': 'c', 'id': 875, 'synset': 'rabbit.n.01', 'synonyms': ['rabbit'], 'def': 'any of various burrowing animals of the family Leporidae having long ears and short tails', 'name': 'rabbit'}, {'frequency': 'r', 'id': 876, 'synset': 'racer.n.02', 'synonyms': ['race_car', 'racing_car'], 'def': 'a fast car that competes in races', 'name': 'race_car'}, {'frequency': 'c', 'id': 877, 'synset': 'racket.n.04', 'synonyms': ['racket', 'racquet'], 'def': 'a sports implement used to strike a ball in various games', 'name': 'racket'}, {'frequency': 'r', 'id': 878, 'synset': 'radar.n.01', 'synonyms': ['radar'], 'def': 'measuring instrument in which the echo of a pulse of microwave radiation is used to detect and locate distant objects', 'name': 'radar'}, {'frequency': 'c', 'id': 879, 'synset': 'radiator.n.03', 'synonyms': ['radiator'], 'def': 'a mechanism consisting of a metal honeycomb through which hot fluids circulate', 'name': 'radiator'}, 
{'frequency': 'c', 'id': 880, 'synset': 'radio_receiver.n.01', 'synonyms': ['radio_receiver', 'radio_set', 'radio', 'tuner_(radio)'], 'def': 'an electronic receiver that detects and demodulates and amplifies transmitted radio signals', 'name': 'radio_receiver'}, {'frequency': 'c', 'id': 881, 'synset': 'radish.n.03', 'synonyms': ['radish', 'daikon'], 'def': 'pungent edible root of any of various cultivated radish plants', 'name': 'radish'}, {'frequency': 'c', 'id': 882, 'synset': 'raft.n.01', 'synonyms': ['raft'], 'def': 'a flat float (usually made of logs or planks) that can be used for transport or as a platform for swimmers', 'name': 'raft'}, {'frequency': 'r', 'id': 883, 'synset': 'rag_doll.n.01', 'synonyms': ['rag_doll'], 'def': 'a cloth doll that is stuffed and (usually) painted', 'name': 'rag_doll'}, {'frequency': 'c', 'id': 884, 'synset': 'raincoat.n.01', 'synonyms': ['raincoat', 'waterproof_jacket'], 'def': 'a water-resistant coat', 'name': 'raincoat'}, {'frequency': 'c', 'id': 885, 'synset': 'ram.n.05', 'synonyms': ['ram_(animal)'], 'def': 'uncastrated adult male sheep', 'name': 'ram_(animal)'}, {'frequency': 'c', 'id': 886, 'synset': 'raspberry.n.02', 'synonyms': ['raspberry'], 'def': 'red or black edible aggregate berries usually smaller than the related blackberries', 'name': 'raspberry'}, {'frequency': 'r', 'id': 887, 'synset': 'rat.n.01', 'synonyms': ['rat'], 'def': 'any of various long-tailed rodents similar to but larger than a mouse', 'name': 'rat'}, {'frequency': 'c', 'id': 888, 'synset': 'razorblade.n.01', 'synonyms': ['razorblade'], 'def': 'a blade that has a very sharp edge', 'name': 'razorblade'}, {'frequency': 'c', 'id': 889, 'synset': 'reamer.n.01', 'synonyms': ['reamer_(juicer)', 'juicer', 'juice_reamer'], 'def': 'a squeezer with a conical ridged center that is used for squeezing juice from citrus fruit', 'name': 'reamer_(juicer)'}, {'frequency': 'f', 'id': 890, 'synset': 'rearview_mirror.n.01', 'synonyms': ['rearview_mirror'], 'def': 'car mirror that reflects the view out of the rear window', 'name': 'rearview_mirror'}, {'frequency': 'c', 'id': 891, 'synset': 'receipt.n.02', 'synonyms': ['receipt'], 'def': 'an acknowledgment (usually tangible) that payment has been made', 'name': 'receipt'}, {'frequency': 'c', 'id': 892, 'synset': 'recliner.n.01', 'synonyms': ['recliner', 'reclining_chair', 'lounger_(chair)'], 'def': 'an armchair whose back can be lowered and foot can be raised to allow the sitter to recline in it', 'name': 'recliner'}, {'frequency': 'r', 'id': 893, 'synset': 'record_player.n.01', 'synonyms': ['record_player', 'phonograph_(record_player)', 'turntable'], 'def': 'machine in which rotating records cause a stylus to vibrate and the vibrations are amplified acoustically or electronically', 'name': 'record_player'}, {'frequency': 'r', 'id': 894, 'synset': 'red_cabbage.n.02', 'synonyms': ['red_cabbage'], 'def': 'compact head of purplish-red leaves', 'name': 'red_cabbage'}, {'frequency': 'f', 'id': 895, 'synset': 'reflector.n.01', 'synonyms': ['reflector'], 'def': 'device that reflects light, radiation, etc.', 'name': 'reflector'}, {'frequency': 'f', 'id': 896, 'synset': 'remote_control.n.01', 'synonyms': ['remote_control'], 'def': 'a device that can be used to control a machine or apparatus from a distance', 'name': 'remote_control'}, {'frequency': 'c', 'id': 897, 'synset': 'rhinoceros.n.01', 'synonyms': ['rhinoceros'], 'def': 'massive powerful herbivorous odd-toed ungulate of southeast Asia and Africa having very thick skin and one or two horns on the 
snout', 'name': 'rhinoceros'}, {'frequency': 'r', 'id': 898, 'synset': 'rib.n.03', 'synonyms': ['rib_(food)'], 'def': 'cut of meat including one or more ribs', 'name': 'rib_(food)'}, {'frequency': 'r', 'id': 899, 'synset': 'rifle.n.01', 'synonyms': ['rifle'], 'def': 'a shoulder firearm with a long barrel', 'name': 'rifle'}, {'frequency': 'f', 'id': 900, 'synset': 'ring.n.08', 'synonyms': ['ring'], 'def': 'jewelry consisting of a circlet of precious metal (often set with jewels) worn on the finger', 'name': 'ring'}, {'frequency': 'r', 'id': 901, 'synset': 'river_boat.n.01', 'synonyms': ['river_boat'], 'def': 'a boat used on rivers or to ply a river', 'name': 'river_boat'}, {'frequency': 'r', 'id': 902, 'synset': 'road_map.n.02', 'synonyms': ['road_map'], 'def': '(NOT A ROAD) a MAP showing roads (for automobile travel)', 'name': 'road_map'}, {'frequency': 'c', 'id': 903, 'synset': 'robe.n.01', 'synonyms': ['robe'], 'def': 'any loose flowing garment', 'name': 'robe'}, {'frequency': 'c', 'id': 904, 'synset': 'rocking_chair.n.01', 'synonyms': ['rocking_chair'], 'def': 'a chair mounted on rockers', 'name': 'rocking_chair'}, {'frequency': 'r', 'id': 905, 'synset': 'roller_skate.n.01', 'synonyms': ['roller_skate'], 'def': 'a shoe with pairs of rollers (small hard wheels) fixed to the sole', 'name': 'roller_skate'}, {'frequency': 'r', 'id': 906, 'synset': 'rollerblade.n.01', 'synonyms': ['Rollerblade'], 'def': 'an in-line variant of a roller skate', 'name': 'Rollerblade'}, {'frequency': 'c', 'id': 907, 'synset': 'rolling_pin.n.01', 'synonyms': ['rolling_pin'], 'def': 'utensil consisting of a cylinder (usually of wood) with a handle at each end; used to roll out dough', 'name': 'rolling_pin'}, {'frequency': 'r', 'id': 908, 'synset': 'root_beer.n.01', 'synonyms': ['root_beer'], 'def': 'carbonated drink containing extracts of roots and herbs', 'name': 'root_beer'}, {'frequency': 'c', 'id': 909, 'synset': 'router.n.02', 'synonyms': ['router_(computer_equipment)'], 'def': 'a device that forwards data packets between computer networks', 'name': 'router_(computer_equipment)'}, {'frequency': 'f', 'id': 910, 'synset': 'rubber_band.n.01', 'synonyms': ['rubber_band', 'elastic_band'], 'def': 'a narrow band of elastic rubber used to hold things (such as papers) together', 'name': 'rubber_band'}, {'frequency': 'c', 'id': 911, 'synset': 'runner.n.08', 'synonyms': ['runner_(carpet)'], 'def': 'a long narrow carpet', 'name': 'runner_(carpet)'}, {'frequency': 'f', 'id': 912, 'synset': 'sack.n.01', 'synonyms': ['plastic_bag', 'paper_bag'], 'def': "a bag made of paper or plastic for holding customer's purchases", 'name': 'plastic_bag'}, {'frequency': 'f', 'id': 913, 'synset': 'saddle.n.01', 'synonyms': ['saddle_(on_an_animal)'], 'def': 'a seat for the rider of a horse or camel', 'name': 'saddle_(on_an_animal)'}, {'frequency': 'f', 'id': 914, 'synset': 'saddle_blanket.n.01', 'synonyms': ['saddle_blanket', 'saddlecloth', 'horse_blanket'], 'def': 'stable gear consisting of a blanket placed under the saddle', 'name': 'saddle_blanket'}, {'frequency': 'c', 'id': 915, 'synset': 'saddlebag.n.01', 'synonyms': ['saddlebag'], 'def': 'a large bag (or pair of bags) hung over a saddle', 'name': 'saddlebag'}, {'frequency': 'r', 'id': 916, 'synset': 'safety_pin.n.01', 'synonyms': ['safety_pin'], 'def': 'a pin in the form of a clasp; has a guard so the point of the pin will not stick the user', 'name': 'safety_pin'}, {'frequency': 'c', 'id': 917, 'synset': 'sail.n.01', 'synonyms': ['sail'], 'def': 'a large piece of fabric by means of 
which wind is used to propel a sailing vessel', 'name': 'sail'}, {'frequency': 'c', 'id': 918, 'synset': 'salad.n.01', 'synonyms': ['salad'], 'def': 'food mixtures either arranged on a plate or tossed and served with a moist dressing; usually consisting of or including greens', 'name': 'salad'}, {'frequency': 'r', 'id': 919, 'synset': 'salad_plate.n.01', 'synonyms': ['salad_plate', 'salad_bowl'], 'def': 'a plate or bowl for individual servings of salad', 'name': 'salad_plate'}, {'frequency': 'r', 'id': 920, 'synset': 'salami.n.01', 'synonyms': ['salami'], 'def': 'highly seasoned fatty sausage of pork and beef usually dried', 'name': 'salami'}, {'frequency': 'r', 'id': 921, 'synset': 'salmon.n.01', 'synonyms': ['salmon_(fish)'], 'def': 'any of various large food and game fishes of northern waters', 'name': 'salmon_(fish)'}, {'frequency': 'r', 'id': 922, 'synset': 'salmon.n.03', 'synonyms': ['salmon_(food)'], 'def': 'flesh of any of various marine or freshwater fish of the family Salmonidae', 'name': 'salmon_(food)'}, {'frequency': 'r', 'id': 923, 'synset': 'salsa.n.01', 'synonyms': ['salsa'], 'def': 'spicy sauce of tomatoes and onions and chili peppers to accompany Mexican foods', 'name': 'salsa'}, {'frequency': 'f', 'id': 924, 'synset': 'saltshaker.n.01', 'synonyms': ['saltshaker'], 'def': 'a shaker with a perforated top for sprinkling salt', 'name': 'saltshaker'}, {'frequency': 'f', 'id': 925, 'synset': 'sandal.n.01', 'synonyms': ['sandal_(type_of_shoe)'], 'def': 'a shoe consisting of a sole fastened by straps to the foot', 'name': 'sandal_(type_of_shoe)'}, {'frequency': 'f', 'id': 926, 'synset': 'sandwich.n.01', 'synonyms': ['sandwich'], 'def': 'two (or more) slices of bread with a filling between them', 'name': 'sandwich'}, {'frequency': 'r', 'id': 927, 'synset': 'satchel.n.01', 'synonyms': ['satchel'], 'def': 'luggage consisting of a small case with a flat bottom and (usually) a shoulder strap', 'name': 'satchel'}, {'frequency': 'r', 'id': 928, 'synset': 'saucepan.n.01', 'synonyms': ['saucepan'], 'def': 'a deep pan with a handle; used for stewing or boiling', 'name': 'saucepan'}, {'frequency': 'f', 'id': 929, 'synset': 'saucer.n.02', 'synonyms': ['saucer'], 'def': 'a small shallow dish for holding a cup at the table', 'name': 'saucer'}, {'frequency': 'f', 'id': 930, 'synset': 'sausage.n.01', 'synonyms': ['sausage'], 'def': 'highly seasoned minced meat stuffed in casings', 'name': 'sausage'}, {'frequency': 'r', 'id': 931, 'synset': 'sawhorse.n.01', 'synonyms': ['sawhorse', 'sawbuck'], 'def': 'a framework for holding wood that is being sawed', 'name': 'sawhorse'}, {'frequency': 'r', 'id': 932, 'synset': 'sax.n.02', 'synonyms': ['saxophone'], 'def': "a wind instrument with a `J'-shaped form typically made of brass", 'name': 'saxophone'}, {'frequency': 'f', 'id': 933, 'synset': 'scale.n.07', 'synonyms': ['scale_(measuring_instrument)'], 'def': 'a measuring instrument for weighing; shows amount of mass', 'name': 'scale_(measuring_instrument)'}, {'frequency': 'r', 'id': 934, 'synset': 'scarecrow.n.01', 'synonyms': ['scarecrow', 'strawman'], 'def': 'an effigy in the shape of a man to frighten birds away from seeds', 'name': 'scarecrow'}, {'frequency': 'f', 'id': 935, 'synset': 'scarf.n.01', 'synonyms': ['scarf'], 'def': 'a garment worn around the head or neck or shoulders for warmth or decoration', 'name': 'scarf'}, {'frequency': 'c', 'id': 936, 'synset': 'school_bus.n.01', 'synonyms': ['school_bus'], 'def': 'a bus used to transport children to or from school', 'name': 'school_bus'}, 
{'frequency': 'f', 'id': 937, 'synset': 'scissors.n.01', 'synonyms': ['scissors'], 'def': 'a tool having two crossed pivoting blades with looped handles', 'name': 'scissors'}, {'frequency': 'c', 'id': 938, 'synset': 'scoreboard.n.01', 'synonyms': ['scoreboard'], 'def': 'a large board for displaying the score of a contest (and some other information)', 'name': 'scoreboard'}, {'frequency': 'c', 'id': 939, 'synset': 'scrambled_eggs.n.01', 'synonyms': ['scrambled_eggs'], 'def': 'eggs beaten and cooked to a soft firm consistency while stirring', 'name': 'scrambled_eggs'}, {'frequency': 'r', 'id': 940, 'synset': 'scraper.n.01', 'synonyms': ['scraper'], 'def': 'any of various hand tools for scraping', 'name': 'scraper'}, {'frequency': 'r', 'id': 941, 'synset': 'scratcher.n.03', 'synonyms': ['scratcher'], 'def': 'a device used for scratching', 'name': 'scratcher'}, {'frequency': 'c', 'id': 942, 'synset': 'screwdriver.n.01', 'synonyms': ['screwdriver'], 'def': 'a hand tool for driving screws; has a tip that fits into the head of a screw', 'name': 'screwdriver'}, {'frequency': 'c', 'id': 943, 'synset': 'scrub_brush.n.01', 'synonyms': ['scrubbing_brush'], 'def': 'a brush with short stiff bristles for heavy cleaning', 'name': 'scrubbing_brush'}, {'frequency': 'c', 'id': 944, 'synset': 'sculpture.n.01', 'synonyms': ['sculpture'], 'def': 'a three-dimensional work of art', 'name': 'sculpture'}, {'frequency': 'r', 'id': 945, 'synset': 'seabird.n.01', 'synonyms': ['seabird', 'seafowl'], 'def': 'a bird that frequents coastal waters and the open ocean: gulls; pelicans; gannets; cormorants; albatrosses; petrels; etc.', 'name': 'seabird'}, {'frequency': 'r', 'id': 946, 'synset': 'seahorse.n.02', 'synonyms': ['seahorse'], 'def': 'small fish with horse-like heads bent sharply downward and curled tails', 'name': 'seahorse'}, {'frequency': 'r', 'id': 947, 'synset': 'seaplane.n.01', 'synonyms': ['seaplane', 'hydroplane'], 'def': 'an airplane that can land on or take off from water', 'name': 'seaplane'}, {'frequency': 'c', 'id': 948, 'synset': 'seashell.n.01', 'synonyms': ['seashell'], 'def': 'the shell of a marine organism', 'name': 'seashell'}, {'frequency': 'r', 'id': 949, 'synset': 'seedling.n.01', 'synonyms': ['seedling'], 'def': 'young plant or tree grown from a seed', 'name': 'seedling'}, {'frequency': 'c', 'id': 950, 'synset': 'serving_dish.n.01', 'synonyms': ['serving_dish'], 'def': 'a dish used for serving food', 'name': 'serving_dish'}, {'frequency': 'r', 'id': 951, 'synset': 'sewing_machine.n.01', 'synonyms': ['sewing_machine'], 'def': 'a textile machine used as a home appliance for sewing', 'name': 'sewing_machine'}, {'frequency': 'r', 'id': 952, 'synset': 'shaker.n.03', 'synonyms': ['shaker'], 'def': 'a container in which something can be shaken', 'name': 'shaker'}, {'frequency': 'c', 'id': 953, 'synset': 'shampoo.n.01', 'synonyms': ['shampoo'], 'def': 'cleansing agent consisting of soaps or detergents used for washing the hair', 'name': 'shampoo'}, {'frequency': 'r', 'id': 954, 'synset': 'shark.n.01', 'synonyms': ['shark'], 'def': 'typically large carnivorous fishes with sharp teeth', 'name': 'shark'}, {'frequency': 'r', 'id': 955, 'synset': 'sharpener.n.01', 'synonyms': ['sharpener'], 'def': 'any implement that is used to make something (an edge or a point) sharper', 'name': 'sharpener'}, {'frequency': 'r', 'id': 956, 'synset': 'sharpie.n.03', 'synonyms': ['Sharpie'], 'def': 'a pen with indelible ink that will write on any surface', 'name': 'Sharpie'}, {'frequency': 'r', 'id': 957, 'synset': 
'shaver.n.03', 'synonyms': ['shaver_(electric)', 'electric_shaver', 'electric_razor'], 'def': 'a razor powered by an electric motor', 'name': 'shaver_(electric)'}, {'frequency': 'c', 'id': 958, 'synset': 'shaving_cream.n.01', 'synonyms': ['shaving_cream', 'shaving_soap'], 'def': 'toiletry that forms a rich lather for softening the beard before shaving', 'name': 'shaving_cream'}, {'frequency': 'r', 'id': 959, 'synset': 'shawl.n.01', 'synonyms': ['shawl'], 'def': 'cloak consisting of an oblong piece of cloth used to cover the head and shoulders', 'name': 'shawl'}, {'frequency': 'r', 'id': 960, 'synset': 'shears.n.01', 'synonyms': ['shears'], 'def': 'large scissors with strong blades', 'name': 'shears'}, {'frequency': 'f', 'id': 961, 'synset': 'sheep.n.01', 'synonyms': ['sheep'], 'def': 'woolly usually horned ruminant mammal related to the goat', 'name': 'sheep'}, {'frequency': 'r', 'id': 962, 'synset': 'shepherd_dog.n.01', 'synonyms': ['shepherd_dog', 'sheepdog'], 'def': 'any of various usually long-haired breeds of dog reared to herd and guard sheep', 'name': 'shepherd_dog'}, {'frequency': 'r', 'id': 963, 'synset': 'sherbert.n.01', 'synonyms': ['sherbert', 'sherbet'], 'def': 'a frozen dessert made primarily of fruit juice and sugar', 'name': 'sherbert'}, {'frequency': 'r', 'id': 964, 'synset': 'shield.n.02', 'synonyms': ['shield'], 'def': 'armor carried on the arm to intercept blows', 'name': 'shield'}, {'frequency': 'f', 'id': 965, 'synset': 'shirt.n.01', 'synonyms': ['shirt'], 'def': 'a garment worn on the upper half of the body', 'name': 'shirt'}, {'frequency': 'f', 'id': 966, 'synset': 'shoe.n.01', 'synonyms': ['shoe', 'sneaker_(type_of_shoe)', 'tennis_shoe'], 'def': 'common footwear covering the foot', 'name': 'shoe'}, {'frequency': 'c', 'id': 967, 'synset': 'shopping_bag.n.01', 'synonyms': ['shopping_bag'], 'def': 'a bag made of plastic or strong paper (often with handles); used to transport goods after shopping', 'name': 'shopping_bag'}, {'frequency': 'c', 'id': 968, 'synset': 'shopping_cart.n.01', 'synonyms': ['shopping_cart'], 'def': 'a handcart that holds groceries or other goods while shopping', 'name': 'shopping_cart'}, {'frequency': 'f', 'id': 969, 'synset': 'short_pants.n.01', 'synonyms': ['short_pants', 'shorts_(clothing)', 'trunks_(clothing)'], 'def': 'trousers that end at or above the knee', 'name': 'short_pants'}, {'frequency': 'r', 'id': 970, 'synset': 'shot_glass.n.01', 'synonyms': ['shot_glass'], 'def': 'a small glass adequate to hold a single swallow of whiskey', 'name': 'shot_glass'}, {'frequency': 'c', 'id': 971, 'synset': 'shoulder_bag.n.01', 'synonyms': ['shoulder_bag'], 'def': 'a large handbag that can be carried by a strap looped over the shoulder', 'name': 'shoulder_bag'}, {'frequency': 'c', 'id': 972, 'synset': 'shovel.n.01', 'synonyms': ['shovel'], 'def': 'a hand tool for lifting loose material such as snow, dirt, etc.', 'name': 'shovel'}, {'frequency': 'f', 'id': 973, 'synset': 'shower.n.01', 'synonyms': ['shower_head'], 'def': 'a plumbing fixture that sprays water over you', 'name': 'shower_head'}, {'frequency': 'f', 'id': 974, 'synset': 'shower_curtain.n.01', 'synonyms': ['shower_curtain'], 'def': 'a curtain that keeps water from splashing out of the shower area', 'name': 'shower_curtain'}, {'frequency': 'r', 'id': 975, 'synset': 'shredder.n.01', 'synonyms': ['shredder_(for_paper)'], 'def': 'a device that shreds documents', 'name': 'shredder_(for_paper)'}, {'frequency': 'r', 'id': 976, 'synset': 'sieve.n.01', 'synonyms': ['sieve', 
'screen_(sieve)'], 'def': 'a strainer for separating lumps from powdered material or grading particles', 'name': 'sieve'}, {'frequency': 'f', 'id': 977, 'synset': 'signboard.n.01', 'synonyms': ['signboard'], 'def': 'structure displaying a board on which advertisements can be posted', 'name': 'signboard'}, {'frequency': 'c', 'id': 978, 'synset': 'silo.n.01', 'synonyms': ['silo'], 'def': 'a cylindrical tower used for storing goods', 'name': 'silo'}, {'frequency': 'f', 'id': 979, 'synset': 'sink.n.01', 'synonyms': ['sink'], 'def': 'plumbing fixture consisting of a water basin fixed to a wall or floor and having a drainpipe', 'name': 'sink'}, {'frequency': 'f', 'id': 980, 'synset': 'skateboard.n.01', 'synonyms': ['skateboard'], 'def': 'a board with wheels that is ridden in a standing or crouching position and propelled by foot', 'name': 'skateboard'}, {'frequency': 'c', 'id': 981, 'synset': 'skewer.n.01', 'synonyms': ['skewer'], 'def': 'a long pin for holding meat in position while it is being roasted', 'name': 'skewer'}, {'frequency': 'f', 'id': 982, 'synset': 'ski.n.01', 'synonyms': ['ski'], 'def': 'sports equipment for skiing on snow', 'name': 'ski'}, {'frequency': 'f', 'id': 983, 'synset': 'ski_boot.n.01', 'synonyms': ['ski_boot'], 'def': 'a stiff boot that is fastened to a ski with a ski binding', 'name': 'ski_boot'}, {'frequency': 'f', 'id': 984, 'synset': 'ski_parka.n.01', 'synonyms': ['ski_parka', 'ski_jacket'], 'def': 'a parka to be worn while skiing', 'name': 'ski_parka'}, {'frequency': 'f', 'id': 985, 'synset': 'ski_pole.n.01', 'synonyms': ['ski_pole'], 'def': 'a pole with metal points used as an aid in skiing', 'name': 'ski_pole'}, {'frequency': 'f', 'id': 986, 'synset': 'skirt.n.02', 'synonyms': ['skirt'], 'def': 'a garment hanging from the waist; worn mainly by girls and women', 'name': 'skirt'}, {'frequency': 'c', 'id': 987, 'synset': 'sled.n.01', 'synonyms': ['sled', 'sledge', 'sleigh'], 'def': 'a vehicle or flat object for transportation over snow by sliding or pulled by dogs, etc.', 'name': 'sled'}, {'frequency': 'c', 'id': 988, 'synset': 'sleeping_bag.n.01', 'synonyms': ['sleeping_bag'], 'def': 'large padded bag designed to be slept in outdoors', 'name': 'sleeping_bag'}, {'frequency': 'r', 'id': 989, 'synset': 'sling.n.05', 'synonyms': ['sling_(bandage)', 'triangular_bandage'], 'def': 'bandage to support an injured forearm; slung over the shoulder or neck', 'name': 'sling_(bandage)'}, {'frequency': 'c', 'id': 990, 'synset': 'slipper.n.01', 'synonyms': ['slipper_(footwear)', 'carpet_slipper_(footwear)'], 'def': 'low footwear that can be slipped on and off easily; usually worn indoors', 'name': 'slipper_(footwear)'}, {'frequency': 'r', 'id': 991, 'synset': 'smoothie.n.02', 'synonyms': ['smoothie'], 'def': 'a thick smooth drink consisting of fresh fruit pureed with ice cream or yoghurt or milk', 'name': 'smoothie'}, {'frequency': 'r', 'id': 992, 'synset': 'snake.n.01', 'synonyms': ['snake', 'serpent'], 'def': 'limbless scaly elongate reptile; some are venomous', 'name': 'snake'}, {'frequency': 'f', 'id': 993, 'synset': 'snowboard.n.01', 'synonyms': ['snowboard'], 'def': 'a board that resembles a broad ski or a small surfboard; used in a standing position to slide down snow-covered slopes', 'name': 'snowboard'}, {'frequency': 'c', 'id': 994, 'synset': 'snowman.n.01', 'synonyms': ['snowman'], 'def': 'a figure of a person made of packed snow', 'name': 'snowman'}, {'frequency': 'c', 'id': 995, 'synset': 'snowmobile.n.01', 'synonyms': ['snowmobile'], 'def': 'tracked vehicle for 
travel on snow having skis in front', 'name': 'snowmobile'}, {'frequency': 'f', 'id': 996, 'synset': 'soap.n.01', 'synonyms': ['soap'], 'def': 'a cleansing agent made from the salts of vegetable or animal fats', 'name': 'soap'}, {'frequency': 'f', 'id': 997, 'synset': 'soccer_ball.n.01', 'synonyms': ['soccer_ball'], 'def': "an inflated ball used in playing soccer (called `football' outside of the United States)", 'name': 'soccer_ball'}, {'frequency': 'f', 'id': 998, 'synset': 'sock.n.01', 'synonyms': ['sock'], 'def': 'cloth covering for the foot; worn inside the shoe; reaches to between the ankle and the knee', 'name': 'sock'}, {'frequency': 'r', 'id': 999, 'synset': 'soda_fountain.n.02', 'synonyms': ['soda_fountain'], 'def': 'an apparatus for dispensing soda water', 'name': 'soda_fountain'}, {'frequency': 'r', 'id': 1000, 'synset': 'soda_water.n.01', 'synonyms': ['carbonated_water', 'club_soda', 'seltzer', 'sparkling_water'], 'def': 'effervescent beverage artificially charged with carbon dioxide', 'name': 'carbonated_water'}, {'frequency': 'f', 'id': 1001, 'synset': 'sofa.n.01', 'synonyms': ['sofa', 'couch', 'lounge'], 'def': 'an upholstered seat for more than one person', 'name': 'sofa'}, {'frequency': 'r', 'id': 1002, 'synset': 'softball.n.01', 'synonyms': ['softball'], 'def': 'ball used in playing softball', 'name': 'softball'}, {'frequency': 'c', 'id': 1003, 'synset': 'solar_array.n.01', 'synonyms': ['solar_array', 'solar_battery', 'solar_panel'], 'def': 'electrical device consisting of a large array of connected solar cells', 'name': 'solar_array'}, {'frequency': 'r', 'id': 1004, 'synset': 'sombrero.n.02', 'synonyms': ['sombrero'], 'def': 'a straw hat with a tall crown and broad brim; worn in American southwest and in Mexico', 'name': 'sombrero'}, {'frequency': 'c', 'id': 1005, 'synset': 'soup.n.01', 'synonyms': ['soup'], 'def': 'liquid food especially of meat or fish or vegetable stock often containing pieces of solid food', 'name': 'soup'}, {'frequency': 'r', 'id': 1006, 'synset': 'soup_bowl.n.01', 'synonyms': ['soup_bowl'], 'def': 'a bowl for serving soup', 'name': 'soup_bowl'}, {'frequency': 'c', 'id': 1007, 'synset': 'soupspoon.n.01', 'synonyms': ['soupspoon'], 'def': 'a spoon with a rounded bowl for eating soup', 'name': 'soupspoon'}, {'frequency': 'c', 'id': 1008, 'synset': 'sour_cream.n.01', 'synonyms': ['sour_cream', 'soured_cream'], 'def': 'soured light cream', 'name': 'sour_cream'}, {'frequency': 'r', 'id': 1009, 'synset': 'soya_milk.n.01', 'synonyms': ['soya_milk', 'soybean_milk', 'soymilk'], 'def': 'a milk substitute containing soybean flour and water; used in some infant formulas and in making tofu', 'name': 'soya_milk'}, {'frequency': 'r', 'id': 1010, 'synset': 'space_shuttle.n.01', 'synonyms': ['space_shuttle'], 'def': "a reusable spacecraft with wings for a controlled descent through the Earth's atmosphere", 'name': 'space_shuttle'}, {'frequency': 'r', 'id': 1011, 'synset': 'sparkler.n.02', 'synonyms': ['sparkler_(fireworks)'], 'def': 'a firework that burns slowly and throws out a shower of sparks', 'name': 'sparkler_(fireworks)'}, {'frequency': 'f', 'id': 1012, 'synset': 'spatula.n.02', 'synonyms': ['spatula'], 'def': 'a hand tool with a thin flexible blade used to mix or spread soft substances', 'name': 'spatula'}, {'frequency': 'r', 'id': 1013, 'synset': 'spear.n.01', 'synonyms': ['spear', 'lance'], 'def': 'a long pointed rod used as a tool or weapon', 'name': 'spear'}, {'frequency': 'f', 'id': 1014, 'synset': 'spectacles.n.01', 'synonyms': ['spectacles', 'specs', 
'eyeglasses', 'glasses'], 'def': 'optical instrument consisting of a frame that holds a pair of lenses for correcting defective vision', 'name': 'spectacles'}, {'frequency': 'c', 'id': 1015, 'synset': 'spice_rack.n.01', 'synonyms': ['spice_rack'], 'def': 'a rack for displaying containers filled with spices', 'name': 'spice_rack'}, {'frequency': 'r', 'id': 1016, 'synset': 'spider.n.01', 'synonyms': ['spider'], 'def': 'predatory arachnid with eight legs, two poison fangs, two feelers, and usually two silk-spinning organs at the back end of the body', 'name': 'spider'}, {'frequency': 'c', 'id': 1017, 'synset': 'sponge.n.01', 'synonyms': ['sponge'], 'def': 'a porous mass usable to absorb water typically used for cleaning', 'name': 'sponge'}, {'frequency': 'f', 'id': 1018, 'synset': 'spoon.n.01', 'synonyms': ['spoon'], 'def': 'a piece of cutlery with a shallow bowl-shaped container and a handle', 'name': 'spoon'}, {'frequency': 'c', 'id': 1019, 'synset': 'sportswear.n.01', 'synonyms': ['sportswear', 'athletic_wear', 'activewear'], 'def': 'attire worn for sport or for casual wear', 'name': 'sportswear'}, {'frequency': 'c', 'id': 1020, 'synset': 'spotlight.n.02', 'synonyms': ['spotlight'], 'def': 'a lamp that produces a strong beam of light to illuminate a restricted area; used to focus attention of a stage performer', 'name': 'spotlight'}, {'frequency': 'r', 'id': 1021, 'synset': 'squirrel.n.01', 'synonyms': ['squirrel'], 'def': 'a kind of arboreal rodent having a long bushy tail', 'name': 'squirrel'}, {'frequency': 'c', 'id': 1022, 'synset': 'stapler.n.01', 'synonyms': ['stapler_(stapling_machine)'], 'def': 'a machine that inserts staples into sheets of paper in order to fasten them together', 'name': 'stapler_(stapling_machine)'}, {'frequency': 'r', 'id': 1023, 'synset': 'starfish.n.01', 'synonyms': ['starfish', 'sea_star'], 'def': 'echinoderms characterized by five arms extending from a central disk', 'name': 'starfish'}, {'frequency': 'f', 'id': 1024, 'synset': 'statue.n.01', 'synonyms': ['statue_(sculpture)'], 'def': 'a sculpture representing a human or animal', 'name': 'statue_(sculpture)'}, {'frequency': 'c', 'id': 1025, 'synset': 'steak.n.01', 'synonyms': ['steak_(food)'], 'def': 'a slice of meat cut from the fleshy part of an animal or large fish', 'name': 'steak_(food)'}, {'frequency': 'r', 'id': 1026, 'synset': 'steak_knife.n.01', 'synonyms': ['steak_knife'], 'def': 'a sharp table knife used in eating steak', 'name': 'steak_knife'}, {'frequency': 'r', 'id': 1027, 'synset': 'steamer.n.02', 'synonyms': ['steamer_(kitchen_appliance)'], 'def': 'a cooking utensil that can be used to cook food by steaming it', 'name': 'steamer_(kitchen_appliance)'}, {'frequency': 'f', 'id': 1028, 'synset': 'steering_wheel.n.01', 'synonyms': ['steering_wheel'], 'def': 'a handwheel that is used for steering', 'name': 'steering_wheel'}, {'frequency': 'r', 'id': 1029, 'synset': 'stencil.n.01', 'synonyms': ['stencil'], 'def': 'a sheet of material (metal, plastic, etc.) 
that has been perforated with a pattern; ink or paint can pass through the perforations to create the printed pattern on the surface below', 'name': 'stencil'}, {'frequency': 'r', 'id': 1030, 'synset': 'step_ladder.n.01', 'synonyms': ['stepladder'], 'def': 'a folding portable ladder hinged at the top', 'name': 'stepladder'}, {'frequency': 'c', 'id': 1031, 'synset': 'step_stool.n.01', 'synonyms': ['step_stool'], 'def': 'a stool that has one or two steps that fold under the seat', 'name': 'step_stool'}, {'frequency': 'c', 'id': 1032, 'synset': 'stereo.n.01', 'synonyms': ['stereo_(sound_system)'], 'def': 'electronic device for playing audio', 'name': 'stereo_(sound_system)'}, {'frequency': 'r', 'id': 1033, 'synset': 'stew.n.02', 'synonyms': ['stew'], 'def': 'food prepared by stewing especially meat or fish with vegetables', 'name': 'stew'}, {'frequency': 'r', 'id': 1034, 'synset': 'stirrer.n.02', 'synonyms': ['stirrer'], 'def': 'an implement used for stirring', 'name': 'stirrer'}, {'frequency': 'f', 'id': 1035, 'synset': 'stirrup.n.01', 'synonyms': ['stirrup'], 'def': "support consisting of metal loops into which rider's feet go", 'name': 'stirrup'}, {'frequency': 'c', 'id': 1036, 'synset': 'stocking.n.01', 'synonyms': ['stockings_(leg_wear)'], 'def': 'close-fitting hosiery to cover the foot and leg; come in matched pairs', 'name': 'stockings_(leg_wear)'}, {'frequency': 'f', 'id': 1037, 'synset': 'stool.n.01', 'synonyms': ['stool'], 'def': 'a simple seat without a back or arms', 'name': 'stool'}, {'frequency': 'f', 'id': 1038, 'synset': 'stop_sign.n.01', 'synonyms': ['stop_sign'], 'def': 'a traffic sign to notify drivers that they must come to a complete stop', 'name': 'stop_sign'}, {'frequency': 'f', 'id': 1039, 'synset': 'stoplight.n.01', 'synonyms': ['brake_light'], 'def': 'a red light on the rear of a motor vehicle that signals when the brakes are applied', 'name': 'brake_light'}, {'frequency': 'f', 'id': 1040, 'synset': 'stove.n.01', 'synonyms': ['stove', 'kitchen_stove', 'range_(kitchen_appliance)', 'kitchen_range', 'cooking_stove'], 'def': 'a kitchen appliance used for cooking food', 'name': 'stove'}, {'frequency': 'c', 'id': 1041, 'synset': 'strainer.n.01', 'synonyms': ['strainer'], 'def': 'a filter to retain larger pieces while smaller pieces and liquids pass through', 'name': 'strainer'}, {'frequency': 'f', 'id': 1042, 'synset': 'strap.n.01', 'synonyms': ['strap'], 'def': 'an elongated strip of material for binding things together or holding', 'name': 'strap'}, {'frequency': 'f', 'id': 1043, 'synset': 'straw.n.04', 'synonyms': ['straw_(for_drinking)', 'drinking_straw'], 'def': 'a thin paper or plastic tube used to suck liquids into the mouth', 'name': 'straw_(for_drinking)'}, {'frequency': 'f', 'id': 1044, 'synset': 'strawberry.n.01', 'synonyms': ['strawberry'], 'def': 'sweet fleshy red fruit', 'name': 'strawberry'}, {'frequency': 'f', 'id': 1045, 'synset': 'street_sign.n.01', 'synonyms': ['street_sign'], 'def': 'a sign visible from the street', 'name': 'street_sign'}, {'frequency': 'f', 'id': 1046, 'synset': 'streetlight.n.01', 'synonyms': ['streetlight', 'street_lamp'], 'def': 'a lamp supported on a lamppost; for illuminating a street', 'name': 'streetlight'}, {'frequency': 'r', 'id': 1047, 'synset': 'string_cheese.n.01', 'synonyms': ['string_cheese'], 'def': 'cheese formed in long strings twisted together', 'name': 'string_cheese'}, {'frequency': 'r', 'id': 1048, 'synset': 'stylus.n.02', 'synonyms': ['stylus'], 'def': 'a pointed tool for writing or drawing or engraving', 'name': 
'stylus'}, {'frequency': 'r', 'id': 1049, 'synset': 'subwoofer.n.01', 'synonyms': ['subwoofer'], 'def': 'a loudspeaker that is designed to reproduce very low bass frequencies', 'name': 'subwoofer'}, {'frequency': 'r', 'id': 1050, 'synset': 'sugar_bowl.n.01', 'synonyms': ['sugar_bowl'], 'def': 'a dish in which sugar is served', 'name': 'sugar_bowl'}, {'frequency': 'r', 'id': 1051, 'synset': 'sugarcane.n.01', 'synonyms': ['sugarcane_(plant)'], 'def': 'juicy canes whose sap is a source of molasses and commercial sugar; fresh canes are sometimes chewed for the juice', 'name': 'sugarcane_(plant)'}, {'frequency': 'c', 'id': 1052, 'synset': 'suit.n.01', 'synonyms': ['suit_(clothing)'], 'def': 'a set of garments (usually including a jacket and trousers or skirt) for outerwear all of the same fabric and color', 'name': 'suit_(clothing)'}, {'frequency': 'c', 'id': 1053, 'synset': 'sunflower.n.01', 'synonyms': ['sunflower'], 'def': 'any plant of the genus Helianthus having large flower heads with dark disk florets and showy yellow rays', 'name': 'sunflower'}, {'frequency': 'f', 'id': 1054, 'synset': 'sunglasses.n.01', 'synonyms': ['sunglasses'], 'def': 'spectacles that are darkened or polarized to protect the eyes from the glare of the sun', 'name': 'sunglasses'}, {'frequency': 'c', 'id': 1055, 'synset': 'sunhat.n.01', 'synonyms': ['sunhat'], 'def': 'a hat with a broad brim that protects the face from direct exposure to the sun', 'name': 'sunhat'}, {'frequency': 'r', 'id': 1056, 'synset': 'sunscreen.n.01', 'synonyms': ['sunscreen', 'sunblock'], 'def': 'a cream spread on the skin; contains a chemical to filter out ultraviolet light and so protect from sunburn', 'name': 'sunscreen'}, {'frequency': 'f', 'id': 1057, 'synset': 'surfboard.n.01', 'synonyms': ['surfboard'], 'def': 'a narrow buoyant board for riding surf', 'name': 'surfboard'}, {'frequency': 'c', 'id': 1058, 'synset': 'sushi.n.01', 'synonyms': ['sushi'], 'def': 'rice (with raw fish) wrapped in seaweed', 'name': 'sushi'}, {'frequency': 'c', 'id': 1059, 'synset': 'swab.n.02', 'synonyms': ['mop'], 'def': 'cleaning implement consisting of absorbent material fastened to a handle; for cleaning floors', 'name': 'mop'}, {'frequency': 'c', 'id': 1060, 'synset': 'sweat_pants.n.01', 'synonyms': ['sweat_pants'], 'def': 'loose-fitting trousers with elastic cuffs; worn by athletes', 'name': 'sweat_pants'}, {'frequency': 'c', 'id': 1061, 'synset': 'sweatband.n.02', 'synonyms': ['sweatband'], 'def': 'a band of material tied around the forehead or wrist to absorb sweat', 'name': 'sweatband'}, {'frequency': 'f', 'id': 1062, 'synset': 'sweater.n.01', 'synonyms': ['sweater'], 'def': 'a crocheted or knitted garment covering the upper part of the body', 'name': 'sweater'}, {'frequency': 'f', 'id': 1063, 'synset': 'sweatshirt.n.01', 'synonyms': ['sweatshirt'], 'def': 'cotton knit pullover with long sleeves worn during athletic activity', 'name': 'sweatshirt'}, {'frequency': 'c', 'id': 1064, 'synset': 'sweet_potato.n.02', 'synonyms': ['sweet_potato'], 'def': 'the edible tuberous root of the sweet potato vine', 'name': 'sweet_potato'}, {'frequency': 'f', 'id': 1065, 'synset': 'swimsuit.n.01', 'synonyms': ['swimsuit', 'swimwear', 'bathing_suit', 'swimming_costume', 'bathing_costume', 'swimming_trunks', 'bathing_trunks'], 'def': 'garment worn for swimming', 'name': 'swimsuit'}, {'frequency': 'c', 'id': 1066, 'synset': 'sword.n.01', 'synonyms': ['sword'], 'def': 'a cutting or thrusting weapon that has a long metal blade', 'name': 'sword'}, {'frequency': 'r', 'id': 1067, 
'synset': 'syringe.n.01', 'synonyms': ['syringe'], 'def': 'a medical instrument used to inject or withdraw fluids', 'name': 'syringe'}, {'frequency': 'r', 'id': 1068, 'synset': 'tabasco.n.02', 'synonyms': ['Tabasco_sauce'], 'def': 'very spicy sauce (trade name Tabasco) made from fully-aged red peppers', 'name': 'Tabasco_sauce'}, {'frequency': 'r', 'id': 1069, 'synset': 'table-tennis_table.n.01', 'synonyms': ['table-tennis_table', 'ping-pong_table'], 'def': 'a table used for playing table tennis', 'name': 'table-tennis_table'}, {'frequency': 'f', 'id': 1070, 'synset': 'table.n.02', 'synonyms': ['table'], 'def': 'a piece of furniture having a smooth flat top that is usually supported by one or more vertical legs', 'name': 'table'}, {'frequency': 'c', 'id': 1071, 'synset': 'table_lamp.n.01', 'synonyms': ['table_lamp'], 'def': 'a lamp that sits on a table', 'name': 'table_lamp'}, {'frequency': 'f', 'id': 1072, 'synset': 'tablecloth.n.01', 'synonyms': ['tablecloth'], 'def': 'a covering spread over a dining table', 'name': 'tablecloth'}, {'frequency': 'r', 'id': 1073, 'synset': 'tachometer.n.01', 'synonyms': ['tachometer'], 'def': 'measuring instrument for indicating speed of rotation', 'name': 'tachometer'}, {'frequency': 'r', 'id': 1074, 'synset': 'taco.n.02', 'synonyms': ['taco'], 'def': 'a small tortilla cupped around a filling', 'name': 'taco'}, {'frequency': 'f', 'id': 1075, 'synset': 'tag.n.02', 'synonyms': ['tag'], 'def': 'a label associated with something for the purpose of identification or information', 'name': 'tag'}, {'frequency': 'f', 'id': 1076, 'synset': 'taillight.n.01', 'synonyms': ['taillight', 'rear_light'], 'def': 'lamp (usually red) mounted at the rear of a motor vehicle', 'name': 'taillight'}, {'frequency': 'r', 'id': 1077, 'synset': 'tambourine.n.01', 'synonyms': ['tambourine'], 'def': 'a shallow drum with a single drumhead and with metallic disks in the sides', 'name': 'tambourine'}, {'frequency': 'r', 'id': 1078, 'synset': 'tank.n.01', 'synonyms': ['army_tank', 'armored_combat_vehicle', 'armoured_combat_vehicle'], 'def': 'an enclosed armored military vehicle; has a cannon and moves on caterpillar treads', 'name': 'army_tank'}, {'frequency': 'c', 'id': 1079, 'synset': 'tank.n.02', 'synonyms': ['tank_(storage_vessel)', 'storage_tank'], 'def': 'a large (usually metallic) vessel for holding gases or liquids', 'name': 'tank_(storage_vessel)'}, {'frequency': 'f', 'id': 1080, 'synset': 'tank_top.n.01', 'synonyms': ['tank_top_(clothing)'], 'def': 'a tight-fitting sleeveless shirt with wide shoulder straps and low neck and no front opening', 'name': 'tank_top_(clothing)'}, {'frequency': 'c', 'id': 1081, 'synset': 'tape.n.01', 'synonyms': ['tape_(sticky_cloth_or_paper)'], 'def': 'a long thin piece of cloth or paper as used for binding or fastening', 'name': 'tape_(sticky_cloth_or_paper)'}, {'frequency': 'c', 'id': 1082, 'synset': 'tape.n.04', 'synonyms': ['tape_measure', 'measuring_tape'], 'def': 'measuring instrument consisting of a narrow strip (cloth or metal) marked in inches or centimeters and used for measuring lengths', 'name': 'tape_measure'}, {'frequency': 'c', 'id': 1083, 'synset': 'tapestry.n.02', 'synonyms': ['tapestry'], 'def': 'a heavy textile with a woven design; used for curtains and upholstery', 'name': 'tapestry'}, {'frequency': 'f', 'id': 1084, 'synset': 'tarpaulin.n.01', 'synonyms': ['tarp'], 'def': 'waterproofed canvas', 'name': 'tarp'}, {'frequency': 'c', 'id': 1085, 'synset': 'tartan.n.01', 'synonyms': ['tartan', 'plaid'], 'def': 'a cloth having a 
crisscross design', 'name': 'tartan'}, {'frequency': 'c', 'id': 1086, 'synset': 'tassel.n.01', 'synonyms': ['tassel'], 'def': 'adornment consisting of a bunch of cords fastened at one end', 'name': 'tassel'}, {'frequency': 'r', 'id': 1087, 'synset': 'tea_bag.n.01', 'synonyms': ['tea_bag'], 'def': 'a measured amount of tea in a bag for an individual serving of tea', 'name': 'tea_bag'}, {'frequency': 'c', 'id': 1088, 'synset': 'teacup.n.02', 'synonyms': ['teacup'], 'def': 'a cup from which tea is drunk', 'name': 'teacup'}, {'frequency': 'c', 'id': 1089, 'synset': 'teakettle.n.01', 'synonyms': ['teakettle'], 'def': 'kettle for boiling water to make tea', 'name': 'teakettle'}, {'frequency': 'c', 'id': 1090, 'synset': 'teapot.n.01', 'synonyms': ['teapot'], 'def': 'pot for brewing tea; usually has a spout and handle', 'name': 'teapot'}, {'frequency': 'f', 'id': 1091, 'synset': 'teddy.n.01', 'synonyms': ['teddy_bear'], 'def': "plaything consisting of a child's toy bear (usually plush and stuffed with soft materials)", 'name': 'teddy_bear'}, {'frequency': 'f', 'id': 1092, 'synset': 'telephone.n.01', 'synonyms': ['telephone', 'phone', 'telephone_set'], 'def': 'electronic device for communicating by voice over long distances', 'name': 'telephone'}, {'frequency': 'c', 'id': 1093, 'synset': 'telephone_booth.n.01', 'synonyms': ['telephone_booth', 'phone_booth', 'call_box', 'telephone_box', 'telephone_kiosk'], 'def': 'booth for using a telephone', 'name': 'telephone_booth'}, {'frequency': 'f', 'id': 1094, 'synset': 'telephone_pole.n.01', 'synonyms': ['telephone_pole', 'telegraph_pole', 'telegraph_post'], 'def': 'tall pole supporting telephone wires', 'name': 'telephone_pole'}, {'frequency': 'r', 'id': 1095, 'synset': 'telephoto_lens.n.01', 'synonyms': ['telephoto_lens', 'zoom_lens'], 'def': 'a camera lens that magnifies the image', 'name': 'telephoto_lens'}, {'frequency': 'c', 'id': 1096, 'synset': 'television_camera.n.01', 'synonyms': ['television_camera', 'tv_camera'], 'def': 'television equipment for capturing and recording video', 'name': 'television_camera'}, {'frequency': 'f', 'id': 1097, 'synset': 'television_receiver.n.01', 'synonyms': ['television_set', 'tv', 'tv_set'], 'def': 'an electronic device that receives television signals and displays them on a screen', 'name': 'television_set'}, {'frequency': 'f', 'id': 1098, 'synset': 'tennis_ball.n.01', 'synonyms': ['tennis_ball'], 'def': 'ball about the size of a fist used in playing tennis', 'name': 'tennis_ball'}, {'frequency': 'f', 'id': 1099, 'synset': 'tennis_racket.n.01', 'synonyms': ['tennis_racket'], 'def': 'a racket used to play tennis', 'name': 'tennis_racket'}, {'frequency': 'r', 'id': 1100, 'synset': 'tequila.n.01', 'synonyms': ['tequila'], 'def': 'Mexican liquor made from fermented juices of an agave plant', 'name': 'tequila'}, {'frequency': 'c', 'id': 1101, 'synset': 'thermometer.n.01', 'synonyms': ['thermometer'], 'def': 'measuring instrument for measuring temperature', 'name': 'thermometer'}, {'frequency': 'c', 'id': 1102, 'synset': 'thermos.n.01', 'synonyms': ['thermos_bottle'], 'def': 'vacuum flask that preserves temperature of hot or cold drinks', 'name': 'thermos_bottle'}, {'frequency': 'c', 'id': 1103, 'synset': 'thermostat.n.01', 'synonyms': ['thermostat'], 'def': 'a regulator for automatically regulating temperature by starting or stopping the supply of heat', 'name': 'thermostat'}, {'frequency': 'r', 'id': 1104, 'synset': 'thimble.n.02', 'synonyms': ['thimble'], 'def': 'a small metal cap to protect the finger while sewing; 
can be used as a small container', 'name': 'thimble'}, {'frequency': 'c', 'id': 1105, 'synset': 'thread.n.01', 'synonyms': ['thread', 'yarn'], 'def': 'a fine cord of twisted fibers (of cotton or silk or wool or nylon etc.) used in sewing and weaving', 'name': 'thread'}, {'frequency': 'c', 'id': 1106, 'synset': 'thumbtack.n.01', 'synonyms': ['thumbtack', 'drawing_pin', 'pushpin'], 'def': 'a tack for attaching papers to a bulletin board or drawing board', 'name': 'thumbtack'}, {'frequency': 'c', 'id': 1107, 'synset': 'tiara.n.01', 'synonyms': ['tiara'], 'def': 'a jeweled headdress worn by women on formal occasions', 'name': 'tiara'}, {'frequency': 'c', 'id': 1108, 'synset': 'tiger.n.02', 'synonyms': ['tiger'], 'def': 'large feline of forests in most of Asia having a tawny coat with black stripes', 'name': 'tiger'}, {'frequency': 'c', 'id': 1109, 'synset': 'tights.n.01', 'synonyms': ['tights_(clothing)', 'leotards'], 'def': 'skintight knit hose covering the body from the waist to the feet worn by acrobats and dancers and as stockings by women and girls', 'name': 'tights_(clothing)'}, {'frequency': 'c', 'id': 1110, 'synset': 'timer.n.01', 'synonyms': ['timer', 'stopwatch'], 'def': 'a timepiece that measures a time interval and signals its end', 'name': 'timer'}, {'frequency': 'f', 'id': 1111, 'synset': 'tinfoil.n.01', 'synonyms': ['tinfoil'], 'def': 'foil made of tin or an alloy of tin and lead', 'name': 'tinfoil'}, {'frequency': 'r', 'id': 1112, 'synset': 'tinsel.n.01', 'synonyms': ['tinsel'], 'def': 'a showy decoration that is basically valueless', 'name': 'tinsel'}, {'frequency': 'f', 'id': 1113, 'synset': 'tissue.n.02', 'synonyms': ['tissue_paper'], 'def': 'a soft thin (usually translucent) paper', 'name': 'tissue_paper'}, {'frequency': 'c', 'id': 1114, 'synset': 'toast.n.01', 'synonyms': ['toast_(food)'], 'def': 'slice of bread that has been toasted', 'name': 'toast_(food)'}, {'frequency': 'f', 'id': 1115, 'synset': 'toaster.n.02', 'synonyms': ['toaster'], 'def': 'a kitchen appliance (usually electric) for toasting bread', 'name': 'toaster'}, {'frequency': 'c', 'id': 1116, 'synset': 'toaster_oven.n.01', 'synonyms': ['toaster_oven'], 'def': 'kitchen appliance consisting of a small electric oven for toasting or warming food', 'name': 'toaster_oven'}, {'frequency': 'f', 'id': 1117, 'synset': 'toilet.n.02', 'synonyms': ['toilet'], 'def': 'a plumbing fixture for defecation and urination', 'name': 'toilet'}, {'frequency': 'f', 'id': 1118, 'synset': 'toilet_tissue.n.01', 'synonyms': ['toilet_tissue', 'toilet_paper', 'bathroom_tissue'], 'def': 'a soft thin absorbent paper for use in toilets', 'name': 'toilet_tissue'}, {'frequency': 'f', 'id': 1119, 'synset': 'tomato.n.01', 'synonyms': ['tomato'], 'def': 'mildly acid red or yellow pulpy fruit eaten as a vegetable', 'name': 'tomato'}, {'frequency': 'c', 'id': 1120, 'synset': 'tongs.n.01', 'synonyms': ['tongs'], 'def': 'any of various devices for taking hold of objects; usually have two hinged legs with handles above and pointed hooks below', 'name': 'tongs'}, {'frequency': 'c', 'id': 1121, 'synset': 'toolbox.n.01', 'synonyms': ['toolbox'], 'def': 'a box or chest or cabinet for holding hand tools', 'name': 'toolbox'}, {'frequency': 'f', 'id': 1122, 'synset': 'toothbrush.n.01', 'synonyms': ['toothbrush'], 'def': 'small brush; has long handle; used to clean teeth', 'name': 'toothbrush'}, {'frequency': 'f', 'id': 1123, 'synset': 'toothpaste.n.01', 'synonyms': ['toothpaste'], 'def': 'a dentifrice in the form of a paste', 'name': 'toothpaste'}, 
{'frequency': 'c', 'id': 1124, 'synset': 'toothpick.n.01', 'synonyms': ['toothpick'], 'def': 'pick consisting of a small strip of wood or plastic; used to pick food from between the teeth', 'name': 'toothpick'}, {'frequency': 'c', 'id': 1125, 'synset': 'top.n.09', 'synonyms': ['cover'], 'def': 'covering for a hole (especially a hole in the top of a container)', 'name': 'cover'}, {'frequency': 'c', 'id': 1126, 'synset': 'tortilla.n.01', 'synonyms': ['tortilla'], 'def': 'thin unleavened pancake made from cornmeal or wheat flour', 'name': 'tortilla'}, {'frequency': 'c', 'id': 1127, 'synset': 'tow_truck.n.01', 'synonyms': ['tow_truck'], 'def': 'a truck equipped to hoist and pull wrecked cars (or to remove cars from no-parking zones)', 'name': 'tow_truck'}, {'frequency': 'f', 'id': 1128, 'synset': 'towel.n.01', 'synonyms': ['towel'], 'def': 'a rectangular piece of absorbent cloth (or paper) for drying or wiping', 'name': 'towel'}, {'frequency': 'f', 'id': 1129, 'synset': 'towel_rack.n.01', 'synonyms': ['towel_rack', 'towel_rail', 'towel_bar'], 'def': 'a rack consisting of one or more bars on which towels can be hung', 'name': 'towel_rack'}, {'frequency': 'f', 'id': 1130, 'synset': 'toy.n.03', 'synonyms': ['toy'], 'def': 'a device regarded as providing amusement', 'name': 'toy'}, {'frequency': 'c', 'id': 1131, 'synset': 'tractor.n.01', 'synonyms': ['tractor_(farm_equipment)'], 'def': 'a wheeled vehicle with large wheels; used in farming and other applications', 'name': 'tractor_(farm_equipment)'}, {'frequency': 'f', 'id': 1132, 'synset': 'traffic_light.n.01', 'synonyms': ['traffic_light'], 'def': 'a device to control vehicle traffic often consisting of three or more lights', 'name': 'traffic_light'}, {'frequency': 'r', 'id': 1133, 'synset': 'trail_bike.n.01', 'synonyms': ['dirt_bike'], 'def': 'a lightweight motorcycle equipped with rugged tires and suspension for off-road use', 'name': 'dirt_bike'}, {'frequency': 'c', 'id': 1134, 'synset': 'trailer_truck.n.01', 'synonyms': ['trailer_truck', 'tractor_trailer', 'trucking_rig', 'articulated_lorry', 'semi_truck'], 'def': 'a truck consisting of a tractor and trailer together', 'name': 'trailer_truck'}, {'frequency': 'f', 'id': 1135, 'synset': 'train.n.01', 'synonyms': ['train_(railroad_vehicle)', 'railroad_train'], 'def': 'public or private transport provided by a line of railway cars coupled together and drawn by a locomotive', 'name': 'train_(railroad_vehicle)'}, {'frequency': 'r', 'id': 1136, 'synset': 'trampoline.n.01', 'synonyms': ['trampoline'], 'def': 'gymnastic apparatus consisting of a strong canvas sheet attached with springs to a metal frame', 'name': 'trampoline'}, {'frequency': 'f', 'id': 1137, 'synset': 'tray.n.01', 'synonyms': ['tray'], 'def': 'an open receptacle for holding or displaying or serving articles or food', 'name': 'tray'}, {'frequency': 'r', 'id': 1138, 'synset': 'tree_house.n.01', 'synonyms': ['tree_house'], 'def': '(NOT A TREE) a PLAYHOUSE built in the branches of a tree', 'name': 'tree_house'}, {'frequency': 'r', 'id': 1139, 'synset': 'trench_coat.n.01', 'synonyms': ['trench_coat'], 'def': 'a military style raincoat; belted with deep pockets', 'name': 'trench_coat'}, {'frequency': 'r', 'id': 1140, 'synset': 'triangle.n.05', 'synonyms': ['triangle_(musical_instrument)'], 'def': 'a percussion instrument consisting of a metal bar bent in the shape of an open triangle', 'name': 'triangle_(musical_instrument)'}, {'frequency': 'r', 'id': 1141, 'synset': 'tricycle.n.01', 'synonyms': ['tricycle'], 'def': 'a vehicle with three 
wheels that is moved by foot pedals', 'name': 'tricycle'}, {'frequency': 'c', 'id': 1142, 'synset': 'tripod.n.01', 'synonyms': ['tripod'], 'def': 'a three-legged rack used for support', 'name': 'tripod'}, {'frequency': 'f', 'id': 1143, 'synset': 'trouser.n.01', 'synonyms': ['trousers', 'pants_(clothing)'], 'def': 'a garment extending from the waist to the knee or ankle, covering each leg separately', 'name': 'trousers'}, {'frequency': 'f', 'id': 1144, 'synset': 'truck.n.01', 'synonyms': ['truck'], 'def': 'an automotive vehicle suitable for hauling', 'name': 'truck'}, {'frequency': 'r', 'id': 1145, 'synset': 'truffle.n.03', 'synonyms': ['truffle_(chocolate)', 'chocolate_truffle'], 'def': 'creamy chocolate candy', 'name': 'truffle_(chocolate)'}, {'frequency': 'c', 'id': 1146, 'synset': 'trunk.n.02', 'synonyms': ['trunk'], 'def': 'luggage consisting of a large strong case used when traveling or for storage', 'name': 'trunk'}, {'frequency': 'r', 'id': 1147, 'synset': 'tub.n.02', 'synonyms': ['vat'], 'def': 'a large open vessel for holding or storing liquids', 'name': 'vat'}, {'frequency': 'c', 'id': 1148, 'synset': 'turban.n.01', 'synonyms': ['turban'], 'def': 'a traditional headdress consisting of a long scarf wrapped around the head', 'name': 'turban'}, {'frequency': 'r', 'id': 1149, 'synset': 'turkey.n.01', 'synonyms': ['turkey_(bird)'], 'def': 'large gallinaceous bird with fan-shaped tail; widely domesticated for food', 'name': 'turkey_(bird)'}, {'frequency': 'c', 'id': 1150, 'synset': 'turkey.n.04', 'synonyms': ['turkey_(food)'], 'def': 'flesh of large domesticated fowl usually roasted', 'name': 'turkey_(food)'}, {'frequency': 'r', 'id': 1151, 'synset': 'turnip.n.01', 'synonyms': ['turnip'], 'def': 'widely cultivated plant having a large fleshy edible white or yellow root', 'name': 'turnip'}, {'frequency': 'c', 'id': 1152, 'synset': 'turtle.n.02', 'synonyms': ['turtle'], 'def': 'any of various aquatic and land reptiles having a bony shell and flipper-like limbs for swimming', 'name': 'turtle'}, {'frequency': 'r', 'id': 1153, 'synset': 'turtleneck.n.01', 'synonyms': ['turtleneck_(clothing)', 'polo-neck'], 'def': 'a sweater or jersey with a high close-fitting collar', 'name': 'turtleneck_(clothing)'}, {'frequency': 'r', 'id': 1154, 'synset': 'typewriter.n.01', 'synonyms': ['typewriter'], 'def': 'hand-operated character printer for printing written messages one character at a time', 'name': 'typewriter'}, {'frequency': 'f', 'id': 1155, 'synset': 'umbrella.n.01', 'synonyms': ['umbrella'], 'def': 'a lightweight handheld collapsible canopy', 'name': 'umbrella'}, {'frequency': 'c', 'id': 1156, 'synset': 'underwear.n.01', 'synonyms': ['underwear', 'underclothes', 'underclothing', 'underpants'], 'def': 'undergarment worn next to the skin and under the outer garments', 'name': 'underwear'}, {'frequency': 'r', 'id': 1157, 'synset': 'unicycle.n.01', 'synonyms': ['unicycle'], 'def': 'a vehicle with a single wheel that is driven by pedals', 'name': 'unicycle'}, {'frequency': 'c', 'id': 1158, 'synset': 'urinal.n.01', 'synonyms': ['urinal'], 'def': 'a plumbing fixture (usually attached to the wall) used by men to urinate', 'name': 'urinal'}, {'frequency': 'r', 'id': 1159, 'synset': 'urn.n.01', 'synonyms': ['urn'], 'def': 'a large vase that usually has a pedestal or feet', 'name': 'urn'}, {'frequency': 'c', 'id': 1160, 'synset': 'vacuum.n.04', 'synonyms': ['vacuum_cleaner'], 'def': 'an electrical home appliance that cleans by suction', 'name': 'vacuum_cleaner'}, {'frequency': 'c', 'id': 1161, 'synset': 
'valve.n.03', 'synonyms': ['valve'], 'def': 'control consisting of a mechanical device for controlling the flow of a fluid', 'name': 'valve'}, {'frequency': 'f', 'id': 1162, 'synset': 'vase.n.01', 'synonyms': ['vase'], 'def': 'an open jar of glass or porcelain used as an ornament or to hold flowers', 'name': 'vase'}, {'frequency': 'c', 'id': 1163, 'synset': 'vending_machine.n.01', 'synonyms': ['vending_machine'], 'def': 'a slot machine for selling goods', 'name': 'vending_machine'}, {'frequency': 'f', 'id': 1164, 'synset': 'vent.n.01', 'synonyms': ['vent', 'blowhole', 'air_vent'], 'def': 'a hole for the escape of gas or air', 'name': 'vent'}, {'frequency': 'c', 'id': 1165, 'synset': 'videotape.n.01', 'synonyms': ['videotape'], 'def': 'a video recording made on magnetic tape', 'name': 'videotape'}, {'frequency': 'r', 'id': 1166, 'synset': 'vinegar.n.01', 'synonyms': ['vinegar'], 'def': 'sour-tasting liquid produced usually by oxidation of the alcohol in wine or cider and used as a condiment or food preservative', 'name': 'vinegar'}, {'frequency': 'r', 'id': 1167, 'synset': 'violin.n.01', 'synonyms': ['violin', 'fiddle'], 'def': 'bowed stringed instrument that is the highest member of the violin family', 'name': 'violin'}, {'frequency': 'r', 'id': 1168, 'synset': 'vodka.n.01', 'synonyms': ['vodka'], 'def': 'unaged colorless liquor originating in Russia', 'name': 'vodka'}, {'frequency': 'r', 'id': 1169, 'synset': 'volleyball.n.02', 'synonyms': ['volleyball'], 'def': 'an inflated ball used in playing volleyball', 'name': 'volleyball'}, {'frequency': 'r', 'id': 1170, 'synset': 'vulture.n.01', 'synonyms': ['vulture'], 'def': 'any of various large birds of prey having naked heads and weak claws and feeding chiefly on carrion', 'name': 'vulture'}, {'frequency': 'c', 'id': 1171, 'synset': 'waffle.n.01', 'synonyms': ['waffle'], 'def': 'pancake batter baked in a waffle iron', 'name': 'waffle'}, {'frequency': 'r', 'id': 1172, 'synset': 'waffle_iron.n.01', 'synonyms': ['waffle_iron'], 'def': 'a kitchen appliance for baking waffles', 'name': 'waffle_iron'}, {'frequency': 'c', 'id': 1173, 'synset': 'wagon.n.01', 'synonyms': ['wagon'], 'def': 'any of various kinds of wheeled vehicles drawn by an animal or a tractor', 'name': 'wagon'}, {'frequency': 'c', 'id': 1174, 'synset': 'wagon_wheel.n.01', 'synonyms': ['wagon_wheel'], 'def': 'a wheel of a wagon', 'name': 'wagon_wheel'}, {'frequency': 'c', 'id': 1175, 'synset': 'walking_stick.n.01', 'synonyms': ['walking_stick'], 'def': 'a stick carried in the hand for support in walking', 'name': 'walking_stick'}, {'frequency': 'c', 'id': 1176, 'synset': 'wall_clock.n.01', 'synonyms': ['wall_clock'], 'def': 'a clock mounted on a wall', 'name': 'wall_clock'}, {'frequency': 'f', 'id': 1177, 'synset': 'wall_socket.n.01', 'synonyms': ['wall_socket', 'wall_plug', 'electric_outlet', 'electrical_outlet', 'outlet', 'electric_receptacle'], 'def': 'receptacle providing a place in a wiring system where current can be taken to run electrical devices', 'name': 'wall_socket'}, {'frequency': 'c', 'id': 1178, 'synset': 'wallet.n.01', 'synonyms': ['wallet', 'billfold'], 'def': 'a pocket-size case for holding papers and paper money', 'name': 'wallet'}, {'frequency': 'r', 'id': 1179, 'synset': 'walrus.n.01', 'synonyms': ['walrus'], 'def': 'either of two large northern marine mammals having ivory tusks and tough hide over thick blubber', 'name': 'walrus'}, {'frequency': 'r', 'id': 1180, 'synset': 'wardrobe.n.01', 'synonyms': ['wardrobe'], 'def': 'a tall piece of furniture that provides 
storage space for clothes; has a door and rails or hooks for hanging clothes', 'name': 'wardrobe'}, {'frequency': 'r', 'id': 1181, 'synset': 'wasabi.n.02', 'synonyms': ['wasabi'], 'def': 'the thick green root of the wasabi plant that the Japanese use in cooking and that tastes like strong horseradish', 'name': 'wasabi'}, {'frequency': 'c', 'id': 1182, 'synset': 'washer.n.03', 'synonyms': ['automatic_washer', 'washing_machine'], 'def': 'a home appliance for washing clothes and linens automatically', 'name': 'automatic_washer'}, {'frequency': 'f', 'id': 1183, 'synset': 'watch.n.01', 'synonyms': ['watch', 'wristwatch'], 'def': 'a small, portable timepiece', 'name': 'watch'}, {'frequency': 'f', 'id': 1184, 'synset': 'water_bottle.n.01', 'synonyms': ['water_bottle'], 'def': 'a bottle for holding water', 'name': 'water_bottle'}, {'frequency': 'c', 'id': 1185, 'synset': 'water_cooler.n.01', 'synonyms': ['water_cooler'], 'def': 'a device for cooling and dispensing drinking water', 'name': 'water_cooler'}, {'frequency': 'c', 'id': 1186, 'synset': 'water_faucet.n.01', 'synonyms': ['water_faucet', 'water_tap', 'tap_(water_faucet)'], 'def': 'a faucet for drawing water from a pipe or cask', 'name': 'water_faucet'}, {'frequency': 'r', 'id': 1187, 'synset': 'water_filter.n.01', 'synonyms': ['water_filter'], 'def': 'a filter to remove impurities from the water supply', 'name': 'water_filter'}, {'frequency': 'r', 'id': 1188, 'synset': 'water_heater.n.01', 'synonyms': ['water_heater', 'hot-water_heater'], 'def': 'a heater and storage tank to supply heated water', 'name': 'water_heater'}, {'frequency': 'r', 'id': 1189, 'synset': 'water_jug.n.01', 'synonyms': ['water_jug'], 'def': 'a jug that holds water', 'name': 'water_jug'}, {'frequency': 'r', 'id': 1190, 'synset': 'water_pistol.n.01', 'synonyms': ['water_gun', 'squirt_gun'], 'def': 'plaything consisting of a toy pistol that squirts water', 'name': 'water_gun'}, {'frequency': 'c', 'id': 1191, 'synset': 'water_scooter.n.01', 'synonyms': ['water_scooter', 'sea_scooter', 'jet_ski'], 'def': 'a motorboat resembling a motor scooter (NOT A SURFBOARD OR WATER SKI)', 'name': 'water_scooter'}, {'frequency': 'c', 'id': 1192, 'synset': 'water_ski.n.01', 'synonyms': ['water_ski'], 'def': 'broad ski for skimming over water towed by a speedboat (DO NOT MARK WATER)', 'name': 'water_ski'}, {'frequency': 'c', 'id': 1193, 'synset': 'water_tower.n.01', 'synonyms': ['water_tower'], 'def': 'a large reservoir for water', 'name': 'water_tower'}, {'frequency': 'c', 'id': 1194, 'synset': 'watering_can.n.01', 'synonyms': ['watering_can'], 'def': 'a container with a handle and a spout with a perforated nozzle; used to sprinkle water over plants', 'name': 'watering_can'}, {'frequency': 'c', 'id': 1195, 'synset': 'watermelon.n.02', 'synonyms': ['watermelon'], 'def': 'large oblong or roundish melon with a hard green rind and sweet watery red or occasionally yellowish pulp', 'name': 'watermelon'}, {'frequency': 'f', 'id': 1196, 'synset': 'weathervane.n.01', 'synonyms': ['weathervane', 'vane_(weathervane)', 'wind_vane'], 'def': 'mechanical device attached to an elevated structure; rotates freely to show the direction of the wind', 'name': 'weathervane'}, {'frequency': 'c', 'id': 1197, 'synset': 'webcam.n.01', 'synonyms': ['webcam'], 'def': 'a digital camera designed to take digital photographs and transmit them over the internet', 'name': 'webcam'}, {'frequency': 'c', 'id': 1198, 'synset': 'wedding_cake.n.01', 'synonyms': ['wedding_cake', 'bridecake'], 'def': 'a rich cake with two or more 
tiers and covered with frosting and decorations; served at a wedding reception', 'name': 'wedding_cake'}, {'frequency': 'c', 'id': 1199, 'synset': 'wedding_ring.n.01', 'synonyms': ['wedding_ring', 'wedding_band'], 'def': 'a ring given to the bride and/or groom at the wedding', 'name': 'wedding_ring'}, {'frequency': 'f', 'id': 1200, 'synset': 'wet_suit.n.01', 'synonyms': ['wet_suit'], 'def': 'a close-fitting garment made of a permeable material; worn in cold water to retain body heat', 'name': 'wet_suit'}, {'frequency': 'f', 'id': 1201, 'synset': 'wheel.n.01', 'synonyms': ['wheel'], 'def': 'a circular frame with spokes (or a solid disc) that can rotate on a shaft or axle', 'name': 'wheel'}, {'frequency': 'c', 'id': 1202, 'synset': 'wheelchair.n.01', 'synonyms': ['wheelchair'], 'def': 'a movable chair mounted on large wheels', 'name': 'wheelchair'}, {'frequency': 'c', 'id': 1203, 'synset': 'whipped_cream.n.01', 'synonyms': ['whipped_cream'], 'def': 'cream that has been beaten until light and fluffy', 'name': 'whipped_cream'}, {'frequency': 'r', 'id': 1204, 'synset': 'whiskey.n.01', 'synonyms': ['whiskey'], 'def': 'a liquor made from fermented mash of grain', 'name': 'whiskey'}, {'frequency': 'r', 'id': 1205, 'synset': 'whistle.n.03', 'synonyms': ['whistle'], 'def': 'a small wind instrument that produces a whistling sound by blowing into it', 'name': 'whistle'}, {'frequency': 'r', 'id': 1206, 'synset': 'wick.n.02', 'synonyms': ['wick'], 'def': 'a loosely woven cord in a candle or oil lamp that is lit on fire', 'name': 'wick'}, {'frequency': 'c', 'id': 1207, 'synset': 'wig.n.01', 'synonyms': ['wig'], 'def': 'hairpiece covering the head and made of real or synthetic hair', 'name': 'wig'}, {'frequency': 'c', 'id': 1208, 'synset': 'wind_chime.n.01', 'synonyms': ['wind_chime'], 'def': 'a decorative arrangement of pieces of metal or glass or pottery that hang together loosely so the wind can cause them to tinkle', 'name': 'wind_chime'}, {'frequency': 'c', 'id': 1209, 'synset': 'windmill.n.01', 'synonyms': ['windmill'], 'def': 'a mill that is powered by the wind', 'name': 'windmill'}, {'frequency': 'c', 'id': 1210, 'synset': 'window_box.n.01', 'synonyms': ['window_box_(for_plants)'], 'def': 'a container for growing plants on a windowsill', 'name': 'window_box_(for_plants)'}, {'frequency': 'f', 'id': 1211, 'synset': 'windshield_wiper.n.01', 'synonyms': ['windshield_wiper', 'windscreen_wiper', 'wiper_(for_windshield/screen)'], 'def': 'a mechanical device that cleans the windshield', 'name': 'windshield_wiper'}, {'frequency': 'c', 'id': 1212, 'synset': 'windsock.n.01', 'synonyms': ['windsock', 'air_sock', 'air-sleeve', 'wind_sleeve', 'wind_cone'], 'def': 'a truncated cloth cone mounted on a mast/pole; shows wind direction', 'name': 'windsock'}, {'frequency': 'f', 'id': 1213, 'synset': 'wine_bottle.n.01', 'synonyms': ['wine_bottle'], 'def': 'a bottle for holding wine', 'name': 'wine_bottle'}, {'frequency': 'r', 'id': 1214, 'synset': 'wine_bucket.n.01', 'synonyms': ['wine_bucket', 'wine_cooler'], 'def': 'a bucket of ice used to chill a bottle of wine', 'name': 'wine_bucket'}, {'frequency': 'f', 'id': 1215, 'synset': 'wineglass.n.01', 'synonyms': ['wineglass'], 'def': 'a glass that has a stem and in which wine is served', 'name': 'wineglass'}, {'frequency': 'r', 'id': 1216, 'synset': 'wing_chair.n.01', 'synonyms': ['wing_chair'], 'def': 'easy chair having wings on each side of a high back', 'name': 'wing_chair'}, {'frequency': 'c', 'id': 1217, 'synset': 'winker.n.02', 'synonyms': ['blinder_(for_horses)'], 
'def': 'blinds that prevent a horse from seeing something on either side', 'name': 'blinder_(for_horses)'}, {'frequency': 'c', 'id': 1218, 'synset': 'wok.n.01', 'synonyms': ['wok'], 'def': 'pan with a convex bottom; used for frying in Chinese cooking', 'name': 'wok'}, {'frequency': 'r', 'id': 1219, 'synset': 'wolf.n.01', 'synonyms': ['wolf'], 'def': 'a wild carnivorous mammal of the dog family, living and hunting in packs', 'name': 'wolf'}, {'frequency': 'c', 'id': 1220, 'synset': 'wooden_spoon.n.02', 'synonyms': ['wooden_spoon'], 'def': 'a spoon made of wood', 'name': 'wooden_spoon'}, {'frequency': 'c', 'id': 1221, 'synset': 'wreath.n.01', 'synonyms': ['wreath'], 'def': 'an arrangement of flowers, leaves, or stems fastened in a ring', 'name': 'wreath'}, {'frequency': 'c', 'id': 1222, 'synset': 'wrench.n.03', 'synonyms': ['wrench', 'spanner'], 'def': 'a hand tool that is used to hold or twist a nut or bolt', 'name': 'wrench'}, {'frequency': 'c', 'id': 1223, 'synset': 'wristband.n.01', 'synonyms': ['wristband'], 'def': 'band consisting of a part of a sleeve that covers the wrist', 'name': 'wristband'}, {'frequency': 'f', 'id': 1224, 'synset': 'wristlet.n.01', 'synonyms': ['wristlet', 'wrist_band'], 'def': 'a band or bracelet worn around the wrist', 'name': 'wristlet'}, {'frequency': 'r', 'id': 1225, 'synset': 'yacht.n.01', 'synonyms': ['yacht'], 'def': 'an expensive vessel propelled by sail or power and used for cruising or racing', 'name': 'yacht'}, {'frequency': 'r', 'id': 1226, 'synset': 'yak.n.02', 'synonyms': ['yak'], 'def': 'large long-haired wild ox of Tibet often domesticated', 'name': 'yak'}, {'frequency': 'c', 'id': 1227, 'synset': 'yogurt.n.01', 'synonyms': ['yogurt', 'yoghurt', 'yoghourt'], 'def': 'a custard-like food made from curdled milk', 'name': 'yogurt'}, {'frequency': 'r', 'id': 1228, 'synset': 'yoke.n.07', 'synonyms': ['yoke_(animal_equipment)'], 'def': 'gear joining two animals at the neck; NOT egg yolk', 'name': 'yoke_(animal_equipment)'}, {'frequency': 'f', 'id': 1229, 'synset': 'zebra.n.01', 'synonyms': ['zebra'], 'def': 'any of several fleet black-and-white striped African equines', 'name': 'zebra'}, {'frequency': 'c', 'id': 1230, 'synset': 'zucchini.n.02', 'synonyms': ['zucchini', 'courgette'], 'def': 'small cucumber-shaped vegetable marrow; typically dark green', 'name': 'zucchini'}] # noqa
# fmt: on
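# Illustrative sketch (not part of the original file): every entry above
# carries a 'frequency' key, which in LVIS v0.5 means 'f' = frequent,
# 'c' = common, 'r' = rare. Grouping by it looks like the snippet below;
# `LVIS_CATEGORIES` is assumed to be the name the list is bound to earlier
# in this file.
#
# from collections import Counter
# Counter(cat["frequency"] for cat in LVIS_CATEGORIES)
# # e.g. Counter({'f': ..., 'c': ..., 'r': ...})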
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/lvis_v0_5_categories.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
"""
Note:
For your custom dataset, there is no need to hard-code metadata anywhere in the code.
For example, for COCO-format datasets, metadata is obtained automatically
when calling `load_coco_json`. For other datasets, metadata may also be obtained in other ways
during loading.
However, we hard-code metadata for a few common datasets here.
The only goal is to allow users who don't have these datasets to use pre-trained models.
Users don't have to download a COCO json (which contains metadata) in order to visualize a
COCO model (with correct class names and colors).
"""
# All COCO categories, together with their nice-looking visualization colors
# It's from https://github.com/cocodataset/panopticapi/blob/master/panoptic_coco_categories.json
COCO_CATEGORIES = [
{"color": [220, 20, 60], "isthing": 1, "id": 1, "name": "person"},
{"color": [119, 11, 32], "isthing": 1, "id": 2, "name": "bicycle"},
{"color": [0, 0, 142], "isthing": 1, "id": 3, "name": "car"},
{"color": [0, 0, 230], "isthing": 1, "id": 4, "name": "motorcycle"},
{"color": [106, 0, 228], "isthing": 1, "id": 5, "name": "airplane"},
{"color": [0, 60, 100], "isthing": 1, "id": 6, "name": "bus"},
{"color": [0, 80, 100], "isthing": 1, "id": 7, "name": "train"},
{"color": [0, 0, 70], "isthing": 1, "id": 8, "name": "truck"},
{"color": [0, 0, 192], "isthing": 1, "id": 9, "name": "boat"},
{"color": [250, 170, 30], "isthing": 1, "id": 10, "name": "traffic light"},
{"color": [100, 170, 30], "isthing": 1, "id": 11, "name": "fire hydrant"},
{"color": [220, 220, 0], "isthing": 1, "id": 13, "name": "stop sign"},
{"color": [175, 116, 175], "isthing": 1, "id": 14, "name": "parking meter"},
{"color": [250, 0, 30], "isthing": 1, "id": 15, "name": "bench"},
{"color": [165, 42, 42], "isthing": 1, "id": 16, "name": "bird"},
{"color": [255, 77, 255], "isthing": 1, "id": 17, "name": "cat"},
{"color": [0, 226, 252], "isthing": 1, "id": 18, "name": "dog"},
{"color": [182, 182, 255], "isthing": 1, "id": 19, "name": "horse"},
{"color": [0, 82, 0], "isthing": 1, "id": 20, "name": "sheep"},
{"color": [120, 166, 157], "isthing": 1, "id": 21, "name": "cow"},
{"color": [110, 76, 0], "isthing": 1, "id": 22, "name": "elephant"},
{"color": [174, 57, 255], "isthing": 1, "id": 23, "name": "bear"},
{"color": [199, 100, 0], "isthing": 1, "id": 24, "name": "zebra"},
{"color": [72, 0, 118], "isthing": 1, "id": 25, "name": "giraffe"},
{"color": [255, 179, 240], "isthing": 1, "id": 27, "name": "backpack"},
{"color": [0, 125, 92], "isthing": 1, "id": 28, "name": "umbrella"},
{"color": [209, 0, 151], "isthing": 1, "id": 31, "name": "handbag"},
{"color": [188, 208, 182], "isthing": 1, "id": 32, "name": "tie"},
{"color": [0, 220, 176], "isthing": 1, "id": 33, "name": "suitcase"},
{"color": [255, 99, 164], "isthing": 1, "id": 34, "name": "frisbee"},
{"color": [92, 0, 73], "isthing": 1, "id": 35, "name": "skis"},
{"color": [133, 129, 255], "isthing": 1, "id": 36, "name": "snowboard"},
{"color": [78, 180, 255], "isthing": 1, "id": 37, "name": "sports ball"},
{"color": [0, 228, 0], "isthing": 1, "id": 38, "name": "kite"},
{"color": [174, 255, 243], "isthing": 1, "id": 39, "name": "baseball bat"},
{"color": [45, 89, 255], "isthing": 1, "id": 40, "name": "baseball glove"},
{"color": [134, 134, 103], "isthing": 1, "id": 41, "name": "skateboard"},
{"color": [145, 148, 174], "isthing": 1, "id": 42, "name": "surfboard"},
{"color": [255, 208, 186], "isthing": 1, "id": 43, "name": "tennis racket"},
{"color": [197, 226, 255], "isthing": 1, "id": 44, "name": "bottle"},
{"color": [171, 134, 1], "isthing": 1, "id": 46, "name": "wine glass"},
{"color": [109, 63, 54], "isthing": 1, "id": 47, "name": "cup"},
{"color": [207, 138, 255], "isthing": 1, "id": 48, "name": "fork"},
{"color": [151, 0, 95], "isthing": 1, "id": 49, "name": "knife"},
{"color": [9, 80, 61], "isthing": 1, "id": 50, "name": "spoon"},
{"color": [84, 105, 51], "isthing": 1, "id": 51, "name": "bowl"},
{"color": [74, 65, 105], "isthing": 1, "id": 52, "name": "banana"},
{"color": [166, 196, 102], "isthing": 1, "id": 53, "name": "apple"},
{"color": [208, 195, 210], "isthing": 1, "id": 54, "name": "sandwich"},
{"color": [255, 109, 65], "isthing": 1, "id": 55, "name": "orange"},
{"color": [0, 143, 149], "isthing": 1, "id": 56, "name": "broccoli"},
{"color": [179, 0, 194], "isthing": 1, "id": 57, "name": "carrot"},
{"color": [209, 99, 106], "isthing": 1, "id": 58, "name": "hot dog"},
{"color": [5, 121, 0], "isthing": 1, "id": 59, "name": "pizza"},
{"color": [227, 255, 205], "isthing": 1, "id": 60, "name": "donut"},
{"color": [147, 186, 208], "isthing": 1, "id": 61, "name": "cake"},
{"color": [153, 69, 1], "isthing": 1, "id": 62, "name": "chair"},
{"color": [3, 95, 161], "isthing": 1, "id": 63, "name": "couch"},
{"color": [163, 255, 0], "isthing": 1, "id": 64, "name": "potted plant"},
{"color": [119, 0, 170], "isthing": 1, "id": 65, "name": "bed"},
{"color": [0, 182, 199], "isthing": 1, "id": 67, "name": "dining table"},
{"color": [0, 165, 120], "isthing": 1, "id": 70, "name": "toilet"},
{"color": [183, 130, 88], "isthing": 1, "id": 72, "name": "tv"},
{"color": [95, 32, 0], "isthing": 1, "id": 73, "name": "laptop"},
{"color": [130, 114, 135], "isthing": 1, "id": 74, "name": "mouse"},
{"color": [110, 129, 133], "isthing": 1, "id": 75, "name": "remote"},
{"color": [166, 74, 118], "isthing": 1, "id": 76, "name": "keyboard"},
{"color": [219, 142, 185], "isthing": 1, "id": 77, "name": "cell phone"},
{"color": [79, 210, 114], "isthing": 1, "id": 78, "name": "microwave"},
{"color": [178, 90, 62], "isthing": 1, "id": 79, "name": "oven"},
{"color": [65, 70, 15], "isthing": 1, "id": 80, "name": "toaster"},
{"color": [127, 167, 115], "isthing": 1, "id": 81, "name": "sink"},
{"color": [59, 105, 106], "isthing": 1, "id": 82, "name": "refrigerator"},
{"color": [142, 108, 45], "isthing": 1, "id": 84, "name": "book"},
{"color": [196, 172, 0], "isthing": 1, "id": 85, "name": "clock"},
{"color": [95, 54, 80], "isthing": 1, "id": 86, "name": "vase"},
{"color": [128, 76, 255], "isthing": 1, "id": 87, "name": "scissors"},
{"color": [201, 57, 1], "isthing": 1, "id": 88, "name": "teddy bear"},
{"color": [246, 0, 122], "isthing": 1, "id": 89, "name": "hair drier"},
{"color": [191, 162, 208], "isthing": 1, "id": 90, "name": "toothbrush"},
{"color": [255, 255, 128], "isthing": 0, "id": 92, "name": "banner"},
{"color": [147, 211, 203], "isthing": 0, "id": 93, "name": "blanket"},
{"color": [150, 100, 100], "isthing": 0, "id": 95, "name": "bridge"},
{"color": [168, 171, 172], "isthing": 0, "id": 100, "name": "cardboard"},
{"color": [146, 112, 198], "isthing": 0, "id": 107, "name": "counter"},
{"color": [210, 170, 100], "isthing": 0, "id": 109, "name": "curtain"},
{"color": [92, 136, 89], "isthing": 0, "id": 112, "name": "door-stuff"},
{"color": [218, 88, 184], "isthing": 0, "id": 118, "name": "floor-wood"},
{"color": [241, 129, 0], "isthing": 0, "id": 119, "name": "flower"},
{"color": [217, 17, 255], "isthing": 0, "id": 122, "name": "fruit"},
{"color": [124, 74, 181], "isthing": 0, "id": 125, "name": "gravel"},
{"color": [70, 70, 70], "isthing": 0, "id": 128, "name": "house"},
{"color": [255, 228, 255], "isthing": 0, "id": 130, "name": "light"},
{"color": [154, 208, 0], "isthing": 0, "id": 133, "name": "mirror-stuff"},
{"color": [193, 0, 92], "isthing": 0, "id": 138, "name": "net"},
{"color": [76, 91, 113], "isthing": 0, "id": 141, "name": "pillow"},
{"color": [255, 180, 195], "isthing": 0, "id": 144, "name": "platform"},
{"color": [106, 154, 176], "isthing": 0, "id": 145, "name": "playingfield"},
{"color": [230, 150, 140], "isthing": 0, "id": 147, "name": "railroad"},
{"color": [60, 143, 255], "isthing": 0, "id": 148, "name": "river"},
{"color": [128, 64, 128], "isthing": 0, "id": 149, "name": "road"},
{"color": [92, 82, 55], "isthing": 0, "id": 151, "name": "roof"},
{"color": [254, 212, 124], "isthing": 0, "id": 154, "name": "sand"},
{"color": [73, 77, 174], "isthing": 0, "id": 155, "name": "sea"},
{"color": [255, 160, 98], "isthing": 0, "id": 156, "name": "shelf"},
{"color": [255, 255, 255], "isthing": 0, "id": 159, "name": "snow"},
{"color": [104, 84, 109], "isthing": 0, "id": 161, "name": "stairs"},
{"color": [169, 164, 131], "isthing": 0, "id": 166, "name": "tent"},
{"color": [225, 199, 255], "isthing": 0, "id": 168, "name": "towel"},
{"color": [137, 54, 74], "isthing": 0, "id": 171, "name": "wall-brick"},
{"color": [135, 158, 223], "isthing": 0, "id": 175, "name": "wall-stone"},
{"color": [7, 246, 231], "isthing": 0, "id": 176, "name": "wall-tile"},
{"color": [107, 255, 200], "isthing": 0, "id": 177, "name": "wall-wood"},
{"color": [58, 41, 149], "isthing": 0, "id": 178, "name": "water-other"},
{"color": [183, 121, 142], "isthing": 0, "id": 180, "name": "window-blind"},
{"color": [255, 73, 97], "isthing": 0, "id": 181, "name": "window-other"},
{"color": [107, 142, 35], "isthing": 0, "id": 184, "name": "tree-merged"},
{"color": [190, 153, 153], "isthing": 0, "id": 185, "name": "fence-merged"},
{"color": [146, 139, 141], "isthing": 0, "id": 186, "name": "ceiling-merged"},
{"color": [70, 130, 180], "isthing": 0, "id": 187, "name": "sky-other-merged"},
{"color": [134, 199, 156], "isthing": 0, "id": 188, "name": "cabinet-merged"},
{"color": [209, 226, 140], "isthing": 0, "id": 189, "name": "table-merged"},
{"color": [96, 36, 108], "isthing": 0, "id": 190, "name": "floor-other-merged"},
{"color": [96, 96, 96], "isthing": 0, "id": 191, "name": "pavement-merged"},
{"color": [64, 170, 64], "isthing": 0, "id": 192, "name": "mountain-merged"},
{"color": [152, 251, 152], "isthing": 0, "id": 193, "name": "grass-merged"},
{"color": [208, 229, 228], "isthing": 0, "id": 194, "name": "dirt-merged"},
{"color": [206, 186, 171], "isthing": 0, "id": 195, "name": "paper-merged"},
{"color": [152, 161, 64], "isthing": 0, "id": 196, "name": "food-other-merged"},
{"color": [116, 112, 0], "isthing": 0, "id": 197, "name": "building-other-merged"},
{"color": [0, 114, 143], "isthing": 0, "id": 198, "name": "rock-merged"},
{"color": [102, 102, 156], "isthing": 0, "id": 199, "name": "wall-other-merged"},
{"color": [250, 141, 255], "isthing": 0, "id": 200, "name": "rug-merged"},
]
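# Illustrative sketch (not original detectron2 code): sanity-check the
# thing/stuff split of the list above; the asserts in _get_coco_instances_meta
# and _get_coco_panoptic_separated_meta below depend on exactly 80 "thing"
# and 53 "stuff" categories.
def _example_coco_split_counts():
    num_things = sum(k["isthing"] for k in COCO_CATEGORIES)
    return num_things, len(COCO_CATEGORIES) - num_things  # -> (80, 53)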
# fmt: off
COCO_PERSON_KEYPOINT_NAMES = (
"nose",
"left_eye", "right_eye",
"left_ear", "right_ear",
"left_shoulder", "right_shoulder",
"left_elbow", "right_elbow",
"left_wrist", "right_wrist",
"left_hip", "right_hip",
"left_knee", "right_knee",
"left_ankle", "right_ankle",
)
# fmt: on
# Pairs of keypoints that should be exchanged under horizontal flipping
COCO_PERSON_KEYPOINT_FLIP_MAP = (
("left_eye", "right_eye"),
("left_ear", "right_ear"),
("left_shoulder", "right_shoulder"),
("left_elbow", "right_elbow"),
("left_wrist", "right_wrist"),
("left_hip", "right_hip"),
("left_knee", "right_knee"),
("left_ankle", "right_ankle"),
)
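# Illustrative sketch (not original detectron2 code): under a horizontal flip,
# an index permutation is built from the flip map above and used to reorder
# the keypoint array; unpaired keypoints such as "nose" map to themselves.
def _example_keypoint_flip_indices():
    flip = dict(COCO_PERSON_KEYPOINT_FLIP_MAP)
    flip.update({b: a for a, b in COCO_PERSON_KEYPOINT_FLIP_MAP})
    names = COCO_PERSON_KEYPOINT_NAMES
    return [names.index(flip.get(n, n)) for n in names]  # e.g. [0, 2, 1, 4, 3, ...]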
# Rules for pairs of keypoints to draw a line between, and the line color to use.
KEYPOINT_CONNECTION_RULES = [
# face
("left_ear", "left_eye", (102, 204, 255)),
("right_ear", "right_eye", (51, 153, 255)),
("left_eye", "nose", (102, 0, 204)),
("nose", "right_eye", (51, 102, 255)),
# upper-body
("left_shoulder", "right_shoulder", (255, 128, 0)),
("left_shoulder", "left_elbow", (153, 255, 204)),
("right_shoulder", "right_elbow", (128, 229, 255)),
("left_elbow", "left_wrist", (153, 255, 153)),
("right_elbow", "right_wrist", (102, 255, 224)),
# lower-body
("left_hip", "right_hip", (255, 102, 0)),
("left_hip", "left_knee", (255, 255, 77)),
("right_hip", "right_knee", (153, 255, 204)),
("left_knee", "left_ankle", (191, 255, 128)),
("right_knee", "right_ankle", (255, 195, 77)),
]
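# Illustrative sketch (not original detectron2 code): how the rules above are
# consumed when drawing a skeleton. `keypoints` is a hypothetical mapping of
# keypoint name -> (x, y) for one detected person.
def _example_skeleton_segments(keypoints):
    segments = []
    for kp_a, kp_b, color in KEYPOINT_CONNECTION_RULES:
        if kp_a in keypoints and kp_b in keypoints:
            segments.append((keypoints[kp_a], keypoints[kp_b], color))
    return segments  # ((x1, y1), (x2, y2), (r, g, b)) line specs to draw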
# All Cityscapes categories, together with their nice-looking visualization colors
# It's from https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/helpers/labels.py # noqa
CITYSCAPES_CATEGORIES = [
{"color": (128, 64, 128), "isthing": 0, "id": 7, "trainId": 0, "name": "road"},
{"color": (244, 35, 232), "isthing": 0, "id": 8, "trainId": 1, "name": "sidewalk"},
{"color": (70, 70, 70), "isthing": 0, "id": 11, "trainId": 2, "name": "building"},
{"color": (102, 102, 156), "isthing": 0, "id": 12, "trainId": 3, "name": "wall"},
{"color": (190, 153, 153), "isthing": 0, "id": 13, "trainId": 4, "name": "fence"},
{"color": (153, 153, 153), "isthing": 0, "id": 17, "trainId": 5, "name": "pole"},
{"color": (250, 170, 30), "isthing": 0, "id": 19, "trainId": 6, "name": "traffic light"},
{"color": (220, 220, 0), "isthing": 0, "id": 20, "trainId": 7, "name": "traffic sign"},
{"color": (107, 142, 35), "isthing": 0, "id": 21, "trainId": 8, "name": "vegetation"},
{"color": (152, 251, 152), "isthing": 0, "id": 22, "trainId": 9, "name": "terrain"},
{"color": (70, 130, 180), "isthing": 0, "id": 23, "trainId": 10, "name": "sky"},
{"color": (220, 20, 60), "isthing": 1, "id": 24, "trainId": 11, "name": "person"},
{"color": (255, 0, 0), "isthing": 1, "id": 25, "trainId": 12, "name": "rider"},
{"color": (0, 0, 142), "isthing": 1, "id": 26, "trainId": 13, "name": "car"},
{"color": (0, 0, 70), "isthing": 1, "id": 27, "trainId": 14, "name": "truck"},
{"color": (0, 60, 100), "isthing": 1, "id": 28, "trainId": 15, "name": "bus"},
{"color": (0, 80, 100), "isthing": 1, "id": 31, "trainId": 16, "name": "train"},
{"color": (0, 0, 230), "isthing": 1, "id": 32, "trainId": 17, "name": "motorcycle"},
{"color": (119, 11, 32), "isthing": 1, "id": 33, "trainId": 18, "name": "bicycle"},
]
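# Illustrative sketch (not original detectron2 code): Cityscapes stores raw
# label ids on disk, while models train on the contiguous "trainId"s (0-18);
# a lookup like this converts ground-truth ids for training.
def _example_cityscapes_id_to_train_id():
    return {k["id"]: k["trainId"] for k in CITYSCAPES_CATEGORIES}  # e.g. 7 -> 0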
# fmt: off
ADE20K_SEM_SEG_CATEGORIES = [
"wall", "building", "sky", "floor", "tree", "ceiling", "road, route", "bed", "window ", "grass", "cabinet", "sidewalk, pavement", "person", "earth, ground", "door", "table", "mountain, mount", "plant", "curtain", "chair", "car", "water", "painting, picture", "sofa", "shelf", "house", "sea", "mirror", "rug", "field", "armchair", "seat", "fence", "desk", "rock, stone", "wardrobe, closet, press", "lamp", "tub", "rail", "cushion", "base, pedestal, stand", "box", "column, pillar", "signboard, sign", "chest of drawers, chest, bureau, dresser", "counter", "sand", "sink", "skyscraper", "fireplace", "refrigerator, icebox", "grandstand, covered stand", "path", "stairs", "runway", "case, display case, showcase, vitrine", "pool table, billiard table, snooker table", "pillow", "screen door, screen", "stairway, staircase", "river", "bridge, span", "bookcase", "blind, screen", "coffee table", "toilet, can, commode, crapper, pot, potty, stool, throne", "flower", "book", "hill", "bench", "countertop", "stove", "palm, palm tree", "kitchen island", "computer", "swivel chair", "boat", "bar", "arcade machine", "hovel, hut, hutch, shack, shanty", "bus", "towel", "light", "truck", "tower", "chandelier", "awning, sunshade, sunblind", "street lamp", "booth", "tv", "plane", "dirt track", "clothes", "pole", "land, ground, soil", "bannister, banister, balustrade, balusters, handrail", "escalator, moving staircase, moving stairway", "ottoman, pouf, pouffe, puff, hassock", "bottle", "buffet, counter, sideboard", "poster, posting, placard, notice, bill, card", "stage", "van", "ship", "fountain", "conveyer belt, conveyor belt, conveyer, conveyor, transporter", "canopy", "washer, automatic washer, washing machine", "plaything, toy", "pool", "stool", "barrel, cask", "basket, handbasket", "falls", "tent", "bag", "minibike, motorbike", "cradle", "oven", "ball", "food, solid food", "step, stair", "tank, storage tank", "trade name", "microwave", "pot", "animal", "bicycle", "lake", "dishwasher", "screen", "blanket, cover", "sculpture", "hood, exhaust hood", "sconce", "vase", "traffic light", "tray", "trash can", "fan", "pier", "crt screen", "plate", "monitor", "bulletin board", "shower", "radiator", "glass, drinking glass", "clock", "flag", # noqa
]
# After being processed by `prepare_ade20k_sem_seg.py`, id 255 means ignore
# fmt: on
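# Illustrative sketch (not original detectron2 code): after preparation, a
# ground-truth pixel holds a contiguous id indexing the list above, or 255
# for "ignore"; losses and metrics mask out the 255 pixels.
def _example_ade20k_label_name(label):
    return None if label == 255 else ADE20K_SEM_SEG_CATEGORIES[label]  # 0 -> "wall"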
def _get_coco_instances_meta():
thing_ids = [k["id"] for k in COCO_CATEGORIES if k["isthing"] == 1]
thing_colors = [k["color"] for k in COCO_CATEGORIES if k["isthing"] == 1]
assert len(thing_ids) == 80, len(thing_ids)
# Mapping from the non-contiguous COCO category id to an id in [0, 79]
thing_dataset_id_to_contiguous_id = {k: i for i, k in enumerate(thing_ids)}
thing_classes = [k["name"] for k in COCO_CATEGORIES if k["isthing"] == 1]
ret = {
"thing_dataset_id_to_contiguous_id": thing_dataset_id_to_contiguous_id,
"thing_classes": thing_classes,
"thing_colors": thing_colors,
}
return ret
def _get_coco_panoptic_separated_meta():
"""
Returns metadata for "separated" version of the panoptic segmentation dataset.
"""
stuff_ids = [k["id"] for k in COCO_CATEGORIES if k["isthing"] == 0]
assert len(stuff_ids) == 53, len(stuff_ids)
# For semantic segmentation, this maps the dataset stuff ids to contiguous ids
# in [0, 53] that are used in models (the inverse is used to process results).
# The contiguous id 0 is reserved for an extra category "thing".
stuff_dataset_id_to_contiguous_id = {k: i + 1 for i, k in enumerate(stuff_ids)}
# When converting COCO panoptic annotations to semantic annotations,
# we label the "thing" category as 0.
stuff_dataset_id_to_contiguous_id[0] = 0
# 54 names for COCO stuff categories (including "things")
stuff_classes = ["things"] + [
k["name"].replace("-other", "").replace("-merged", "")
for k in COCO_CATEGORIES
if k["isthing"] == 0
]
# NOTE: I randomly picked a color for things
stuff_colors = [[82, 18, 128]] + [k["color"] for k in COCO_CATEGORIES if k["isthing"] == 0]
ret = {
"stuff_dataset_id_to_contiguous_id": stuff_dataset_id_to_contiguous_id,
"stuff_classes": stuff_classes,
"stuff_colors": stuff_colors,
}
ret.update(_get_coco_instances_meta())
return ret
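# Illustrative sketch (not part of the original detectron2 source): what the
# "separated" metadata contains, assuming COCO_CATEGORIES is defined earlier
# in this module.
#
#   meta = _get_coco_panoptic_separated_meta()
#   len(meta["stuff_classes"])                    # 54 == 1 ("things") + 53 stuff
#   meta["stuff_classes"][0]                      # "things", the extra class
#   meta["stuff_dataset_id_to_contiguous_id"][0]  # 0, the "thing" bucket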
def _get_builtin_metadata(dataset_name):
if dataset_name == "coco":
return _get_coco_instances_meta()
if dataset_name == "coco_panoptic_separated":
return _get_coco_panoptic_separated_meta()
elif dataset_name == "coco_panoptic_standard":
meta = {}
# The following metadata maps contiguous ids in [0, #thing categories +
# #stuff categories) to their names and colors. We keep two replicas of the
# same names and colors under "thing_*" and "stuff_*" because the current
# visualization function in D2 handles thing and stuff classes differently
# due to some heuristic used in Panoptic FPN. We keep the same naming to
# enable reusing existing visualization functions.
thing_classes = [k["name"] for k in COCO_CATEGORIES]
thing_colors = [k["color"] for k in COCO_CATEGORIES]
stuff_classes = [k["name"] for k in COCO_CATEGORIES]
stuff_colors = [k["color"] for k in COCO_CATEGORIES]
meta["thing_classes"] = thing_classes
meta["thing_colors"] = thing_colors
meta["stuff_classes"] = stuff_classes
meta["stuff_colors"] = stuff_colors
# Convert category id for training:
# category id: like semantic segmentation, it is the class id for each
# pixel. Since there are some classes not used in evaluation, the category
# id is not always contiguous, and thus we have two sets of category ids:
# - original category id: category id in the original dataset, mainly
# used for evaluation.
# - contiguous category id: [0, #classes), in order to train the linear
# softmax classifier.
thing_dataset_id_to_contiguous_id = {}
stuff_dataset_id_to_contiguous_id = {}
for i, cat in enumerate(COCO_CATEGORIES):
if cat["isthing"]:
thing_dataset_id_to_contiguous_id[cat["id"]] = i
else:
stuff_dataset_id_to_contiguous_id[cat["id"]] = i
meta["thing_dataset_id_to_contiguous_id"] = thing_dataset_id_to_contiguous_id
meta["stuff_dataset_id_to_contiguous_id"] = stuff_dataset_id_to_contiguous_id
return meta
elif dataset_name == "coco_person":
return {
"thing_classes": ["person"],
"keypoint_names": COCO_PERSON_KEYPOINT_NAMES,
"keypoint_flip_map": COCO_PERSON_KEYPOINT_FLIP_MAP,
"keypoint_connection_rules": KEYPOINT_CONNECTION_RULES,
}
elif dataset_name == "cityscapes":
# fmt: off
CITYSCAPES_THING_CLASSES = [
"person", "rider", "car", "truck",
"bus", "train", "motorcycle", "bicycle",
]
CITYSCAPES_STUFF_CLASSES = [
"road", "sidewalk", "building", "wall", "fence", "pole", "traffic light",
"traffic sign", "vegetation", "terrain", "sky", "person", "rider", "car",
"truck", "bus", "train", "motorcycle", "bicycle",
]
# fmt: on
return {
"thing_classes": CITYSCAPES_THING_CLASSES,
"stuff_classes": CITYSCAPES_STUFF_CLASSES,
}
raise KeyError("No built-in metadata for dataset {}".format(dataset_name))
|
banmo-main
|
third_party/detectron2_old/detectron2/data/datasets/builtin_meta.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
"""
Implement many useful :class:`Augmentation`.
"""
import numpy as np
import sys
from typing import Tuple
from fvcore.transforms.transform import (
BlendTransform,
CropTransform,
HFlipTransform,
NoOpTransform,
PadTransform,
Transform,
TransformList,
VFlipTransform,
)
from PIL import Image
from .augmentation import Augmentation, _transform_to_aug
from .transform import ExtentTransform, ResizeTransform, RotationTransform
__all__ = [
"FixedSizeCrop",
"RandomApply",
"RandomBrightness",
"RandomContrast",
"RandomCrop",
"RandomExtent",
"RandomFlip",
"RandomSaturation",
"RandomLighting",
"RandomRotation",
"Resize",
"ResizeScale",
"ResizeShortestEdge",
"RandomCrop_CategoryAreaConstraint",
]
class RandomApply(Augmentation):
"""
Randomly apply an augmentation with a given probability.
"""
def __init__(self, tfm_or_aug, prob=0.5):
"""
Args:
tfm_or_aug (Transform, Augmentation): the transform or augmentation
to be applied. It can either be a `Transform` or `Augmentation`
instance.
prob (float): probability between 0.0 and 1.0 that
the wrapper transformation is applied
"""
super().__init__()
self.aug = _transform_to_aug(tfm_or_aug)
assert 0.0 <= prob <= 1.0, f"Probability must be between 0.0 and 1.0 (given: {prob})"
self.prob = prob
def get_transform(self, *args):
do = self._rand_range() < self.prob
if do:
return self.aug.get_transform(*args)
else:
return NoOpTransform()
def __call__(self, aug_input):
do = self._rand_range() < self.prob
if do:
return self.aug(aug_input)
else:
return NoOpTransform()
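# Usage sketch (not from the original source): RandomApply accepts either a
# Transform or an Augmentation, e.g. with the classes defined in this file:
#
#   aug = RandomApply(RandomFlip(prob=1.0, horizontal=True), prob=0.3)
#   tfm = aug(aug_input)  # a flip 30% of the time, otherwise NoOpTransform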
class RandomFlip(Augmentation):
"""
Flip the image horizontally or vertically with the given probability.
"""
def __init__(self, prob=0.5, *, horizontal=True, vertical=False):
"""
Args:
prob (float): probability of flip.
horizontal (boolean): whether to apply horizontal flipping
vertical (boolean): whether to apply vertical flipping
"""
super().__init__()
if horizontal and vertical:
raise ValueError("Cannot do both horiz and vert. Please use two Flip instead.")
if not horizontal and not vertical:
raise ValueError("At least one of horiz or vert has to be True!")
self._init(locals())
def get_transform(self, image):
h, w = image.shape[:2]
do = self._rand_range() < self.prob
if do:
if self.horizontal:
return HFlipTransform(w)
elif self.vertical:
return VFlipTransform(h)
else:
return NoOpTransform()
class Resize(Augmentation):
"""Resize image to a fixed target size"""
def __init__(self, shape, interp=Image.BILINEAR):
"""
Args:
shape: (h, w) tuple or an int
interp: PIL interpolation method
"""
if isinstance(shape, int):
shape = (shape, shape)
shape = tuple(shape)
self._init(locals())
def get_transform(self, image):
return ResizeTransform(
image.shape[0], image.shape[1], self.shape[0], self.shape[1], self.interp
)
class ResizeShortestEdge(Augmentation):
"""
Resize the image so that the shorter edge matches the given size, with a limit of
`max_size` on the longer edge. If `max_size` would be exceeded, downscale so that
the longer edge does not exceed `max_size`.
"""
def __init__(
self, short_edge_length, max_size=sys.maxsize, sample_style="range", interp=Image.BILINEAR
):
"""
Args:
short_edge_length (list[int]): If ``sample_style=="range"``,
a [min, max] interval from which to sample the shortest edge length.
If ``sample_style=="choice"``, a list of shortest edge lengths to sample from.
max_size (int): maximum allowed longest edge length.
sample_style (str): either "range" or "choice".
"""
super().__init__()
assert sample_style in ["range", "choice"], sample_style
self.is_range = sample_style == "range"
if isinstance(short_edge_length, int):
short_edge_length = (short_edge_length, short_edge_length)
if self.is_range:
assert len(short_edge_length) == 2, (
"short_edge_length must be two values using 'range' sample style."
f" Got {short_edge_length}!"
)
self._init(locals())
def get_transform(self, image):
h, w = image.shape[:2]
if self.is_range:
size = np.random.randint(self.short_edge_length[0], self.short_edge_length[1] + 1)
else:
size = np.random.choice(self.short_edge_length)
if size == 0:
return NoOpTransform()
scale = size * 1.0 / min(h, w)
if h < w:
newh, neww = size, scale * w
else:
newh, neww = scale * h, size
if max(newh, neww) > self.max_size:
scale = self.max_size * 1.0 / max(newh, neww)
newh = newh * scale
neww = neww * scale
neww = int(neww + 0.5)
newh = int(newh + 0.5)
return ResizeTransform(h, w, newh, neww, self.interp)
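# Worked example (not from the original source): with short_edge_length=800 and
# max_size=1333, an input of (h, w) = (480, 640) is scaled by 800 / 480 to
# (800, 1067), which respects max_size. A wide input of (480, 2000) would first
# become (800, 3333); since 3333 > 1333 it is rescaled by 1333 / 3333 to
# roughly (320, 1333).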
class ResizeScale(Augmentation):
"""
Takes target size as input and randomly scales the given target size between `min_scale`
and `max_scale`. It then scales the input image such that it fits inside the scaled target
box, keeping the aspect ratio constant.
This implements the resize part of Google's 'resize_and_crop' data augmentation:
https://github.com/tensorflow/tpu/blob/master/models/official/detection/utils/input_utils.py#L127
"""
def __init__(
self,
min_scale: float,
max_scale: float,
target_height: int,
target_width: int,
interp: int = Image.BILINEAR,
):
"""
Args:
min_scale: lower bound of the random scale factor.
max_scale: upper bound of the random scale factor.
target_height: target image height.
target_width: target image width.
interp: image interpolation method.
"""
super().__init__()
self._init(locals())
def get_transform(self, image: np.ndarray) -> Transform:
# Compute the image scale and scaled size.
input_size = image.shape[:2]
output_size = (self.target_height, self.target_width)
random_scale = np.random.uniform(self.min_scale, self.max_scale)
random_scale_size = np.multiply(output_size, random_scale)
scale = np.minimum(
random_scale_size[0] / input_size[0], random_scale_size[1] / input_size[1]
)
scaled_size = np.round(np.multiply(input_size, scale)).astype(int)
return ResizeTransform(
input_size[0], input_size[1], scaled_size[0], scaled_size[1], self.interp
)
class RandomRotation(Augmentation):
"""
Rotate the image by a random angle (in degrees, counter clockwise)
around the given center.
"""
def __init__(self, angle, expand=True, center=None, sample_style="range", interp=None):
"""
Args:
angle (list[float]): If ``sample_style=="range"``,
a [min, max] interval from which to sample the angle (in degrees).
If ``sample_style=="choice"``, a list of angles to sample from
expand (bool): choose if the image should be resized to fit the whole
rotated image (default), or simply cropped
center (list[[float, float]]): If ``sample_style=="range"``,
a [[minx, miny], [maxx, maxy]] relative interval from which to sample the center,
[0, 0] being the top left of the image and [1, 1] the bottom right.
If ``sample_style=="choice"``, a list of centers to sample from
Default: None, which means that the center of rotation is the center of the image
center has no effect if expand=True because it only affects shifting
"""
super().__init__()
assert sample_style in ["range", "choice"], sample_style
self.is_range = sample_style == "range"
if isinstance(angle, (float, int)):
angle = (angle, angle)
if center is not None and isinstance(center[0], (float, int)):
center = (center, center)
self._init(locals())
def get_transform(self, image):
h, w = image.shape[:2]
center = None
if self.is_range:
angle = np.random.uniform(self.angle[0], self.angle[1])
if self.center is not None:
center = (
np.random.uniform(self.center[0][0], self.center[1][0]),
np.random.uniform(self.center[0][1], self.center[1][1]),
)
else:
angle = np.random.choice(self.angle)
if self.center is not None:
    # np.random.choice requires a 1-D input, so pick a random index instead
    center = self.center[np.random.randint(len(self.center))]
if center is not None:
center = (w * center[0], h * center[1]) # Convert to absolute coordinates
if angle % 360 == 0:
return NoOpTransform()
return RotationTransform(h, w, angle, expand=self.expand, center=center, interp=self.interp)
class FixedSizeCrop(Augmentation):
"""
If `crop_size` is smaller than the input image size, then it uses a random crop of
the crop size. If `crop_size` is larger than the input image size, then it pads
the right and the bottom of the image to the crop size.
"""
def __init__(self, crop_size: Tuple[int], pad_value: float = 128.0):
"""
Args:
crop_size: target image (height, width).
pad_value: the padding value.
"""
super().__init__()
self._init(locals())
def get_transform(self, image: np.ndarray) -> TransformList:
# Compare the input size with the target output size.
input_size = image.shape[:2]
output_size = self.crop_size
# Random-crop when the input is larger than the target size.
max_offset = np.subtract(input_size, output_size)
max_offset = np.maximum(max_offset, 0)
offset = np.multiply(max_offset, np.random.uniform(0.0, 1.0))
offset = np.round(offset).astype(int)
crop_transform = CropTransform(
offset[1], offset[0], output_size[1], output_size[0], input_size[1], input_size[0]
)
# Pad when the input is smaller than the target size.
pad_size = np.subtract(output_size, input_size)
pad_size = np.maximum(pad_size, 0)
original_size = np.minimum(input_size, output_size)
pad_transform = PadTransform(
0, 0, pad_size[1], pad_size[0], original_size[1], original_size[0], self.pad_value
)
return TransformList([crop_transform, pad_transform])
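# Behavior sketch (not from the original source): with crop_size=(800, 800),
# a 1000x600 input is cropped to 800 rows starting at a random y-offset in
# [0, 200], then padded 200 pixels on the right (with pad_value) to reach
# 800x800; an input that already matches crop_size passes through unchanged.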
class RandomCrop(Augmentation):
"""
Randomly crop a rectangle region out of an image.
"""
def __init__(self, crop_type: str, crop_size):
"""
Args:
crop_type (str): one of "relative_range", "relative", "absolute", "absolute_range".
crop_size (tuple[float, float]): two floats, explained below.
- "relative": crop a (H * crop_size[0], W * crop_size[1]) region from an input image of
size (H, W). crop size should be in (0, 1]
- "relative_range": uniformly sample two values from [crop_size[0], 1]
and [crop_size[1], 1], and use them as in "relative" crop type.
- "absolute": crop a (crop_size[0], crop_size[1]) region from the input image.
crop_size must be smaller than the input image size.
- "absolute_range": for an input of size (H, W), uniformly sample H_crop in
[crop_size[0], min(H, crop_size[1])] and W_crop in [crop_size[0], min(W, crop_size[1])].
Then crop a region (H_crop, W_crop).
"""
# TODO style of relative_range and absolute_range are not consistent:
# one takes (h, w) but another takes (min, max)
super().__init__()
assert crop_type in ["relative_range", "relative", "absolute", "absolute_range"]
self._init(locals())
def get_transform(self, image):
h, w = image.shape[:2]
croph, cropw = self.get_crop_size((h, w))
assert h >= croph and w >= cropw, "Shape computation in {} has bugs.".format(self)
h0 = np.random.randint(h - croph + 1)
w0 = np.random.randint(w - cropw + 1)
return CropTransform(w0, h0, cropw, croph)
def get_crop_size(self, image_size):
"""
Args:
image_size (tuple): height, width
Returns:
crop_size (tuple): height, width in absolute pixels
"""
h, w = image_size
if self.crop_type == "relative":
ch, cw = self.crop_size
return int(h * ch + 0.5), int(w * cw + 0.5)
elif self.crop_type == "relative_range":
crop_size = np.asarray(self.crop_size, dtype=np.float32)
ch, cw = crop_size + np.random.rand(2) * (1 - crop_size)
return int(h * ch + 0.5), int(w * cw + 0.5)
elif self.crop_type == "absolute":
return (min(self.crop_size[0], h), min(self.crop_size[1], w))
elif self.crop_type == "absolute_range":
assert self.crop_size[0] <= self.crop_size[1]
ch = np.random.randint(min(h, self.crop_size[0]), min(h, self.crop_size[1]) + 1)
cw = np.random.randint(min(w, self.crop_size[0]), min(w, self.crop_size[1]) + 1)
return ch, cw
else:
raise NotImplementedError("Unknown crop type {}".format(self.crop_type))
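# Worked examples (not from the original source) for get_crop_size on a
# 480x640 image:
#   RandomCrop("relative", (0.5, 0.5))        -> (240, 320)
#   RandomCrop("absolute", (300, 300))        -> (300, 300)
#   RandomCrop("relative_range", (0.5, 0.5))  -> (480 * ch, 640 * cw), ch, cw ~ U[0.5, 1]
#   RandomCrop("absolute_range", (200, 400))  -> (ch, cw), ch, cw ~ U[200, 400]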
class RandomCrop_CategoryAreaConstraint(Augmentation):
"""
Similar to :class:`RandomCrop`, but finds a cropping window such that no single category
occupies more than `single_category_max_area` of the semantic segmentation ground truth
(a crop dominated by one category can cause instability in training). It attempts to find
such a valid cropping window at most 10 times.
"""
def __init__(
self,
crop_type: str,
crop_size,
single_category_max_area: float = 1.0,
ignored_category: int = None,
):
"""
Args:
crop_type, crop_size: same as in :class:`RandomCrop`
single_category_max_area: the maximum allowed area ratio of a
category. Set to 1.0 to disable
ignored_category: allow this category in the semantic segmentation
ground truth to exceed the area ratio. Usually set to the category
that's ignored in training.
"""
self.crop_aug = RandomCrop(crop_type, crop_size)
self._init(locals())
def get_transform(self, image, sem_seg):
if self.single_category_max_area >= 1.0:
return self.crop_aug.get_transform(image)
else:
h, w = sem_seg.shape
for _ in range(10):
crop_size = self.crop_aug.get_crop_size((h, w))
y0 = np.random.randint(h - crop_size[0] + 1)
x0 = np.random.randint(w - crop_size[1] + 1)
sem_seg_temp = sem_seg[y0 : y0 + crop_size[0], x0 : x0 + crop_size[1]]
labels, cnt = np.unique(sem_seg_temp, return_counts=True)
if self.ignored_category is not None:
cnt = cnt[labels != self.ignored_category]
if len(cnt) > 1 and np.max(cnt) < np.sum(cnt) * self.single_category_max_area:
break
crop_tfm = CropTransform(x0, y0, crop_size[1], crop_size[0])
return crop_tfm
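# Usage sketch (not from the original source), assuming a semantic segmentation
# setup where 255 is the ignore label:
#
#   aug = RandomCrop_CategoryAreaConstraint(
#       "absolute", (512, 1024), single_category_max_area=0.75, ignored_category=255
#   )
#   tfm = aug.get_transform(image, sem_seg)  # retries up to 10 times internally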
class RandomExtent(Augmentation):
"""
Outputs an image by cropping a random "subrect" of the source image.
The subrect can be parameterized to include pixels outside the source image,
in which case they will be set to zeros (i.e. black). The size of the output
image will vary with the size of the random subrect.
"""
def __init__(self, scale_range, shift_range):
"""
Args:
scale_range (l, h): Range of input-to-output size scaling factor
shift_range (x, y): Range of shifts of the cropped subrect. The rect
is shifted by [w / 2 * Uniform(-x, x), h / 2 * Uniform(-y, y)],
where (w, h) is the (width, height) of the input image. Set each
component to zero to crop at the image's center.
"""
super().__init__()
self._init(locals())
def get_transform(self, image):
img_h, img_w = image.shape[:2]
# Initialize src_rect to fit the input image.
src_rect = np.array([-0.5 * img_w, -0.5 * img_h, 0.5 * img_w, 0.5 * img_h])
# Apply a random scaling to the src_rect.
src_rect *= np.random.uniform(self.scale_range[0], self.scale_range[1])
# Apply a random shift to the coordinates origin.
src_rect[0::2] += self.shift_range[0] * img_w * (np.random.rand() - 0.5)
src_rect[1::2] += self.shift_range[1] * img_h * (np.random.rand() - 0.5)
# Map src_rect coordinates into image coordinates (center at corner).
src_rect[0::2] += 0.5 * img_w
src_rect[1::2] += 0.5 * img_h
return ExtentTransform(
src_rect=(src_rect[0], src_rect[1], src_rect[2], src_rect[3]),
output_size=(int(src_rect[3] - src_rect[1]), int(src_rect[2] - src_rect[0])),
)
class RandomContrast(Augmentation):
"""
Randomly transforms image contrast.
Contrast intensity is uniformly sampled in (intensity_min, intensity_max).
- intensity < 1 will reduce contrast
- intensity = 1 will preserve the input image
- intensity > 1 will increase contrast
See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html
"""
def __init__(self, intensity_min, intensity_max):
"""
Args:
intensity_min (float): Minimum augmentation
intensity_max (float): Maximum augmentation
"""
super().__init__()
self._init(locals())
def get_transform(self, image):
w = np.random.uniform(self.intensity_min, self.intensity_max)
return BlendTransform(src_image=image.mean(), src_weight=1 - w, dst_weight=w)
class RandomBrightness(Augmentation):
"""
Randomly transforms image brightness.
Brightness intensity is uniformly sampled in (intensity_min, intensity_max).
- intensity < 1 will reduce brightness
- intensity = 1 will preserve the input image
- intensity > 1 will increase brightness
See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html
"""
def __init__(self, intensity_min, intensity_max):
"""
Args:
intensity_min (float): Minimum augmentation
intensity_max (float): Maximum augmentation
"""
super().__init__()
self._init(locals())
def get_transform(self, image):
w = np.random.uniform(self.intensity_min, self.intensity_max)
return BlendTransform(src_image=0, src_weight=1 - w, dst_weight=w)
class RandomSaturation(Augmentation):
"""
Randomly transforms saturation of an RGB image.
Input images are assumed to have 'RGB' channel order.
Saturation intensity is uniformly sampled in (intensity_min, intensity_max).
- intensity < 1 will reduce saturation (make the image more grayscale)
- intensity = 1 will preserve the input image
- intensity > 1 will increase saturation
See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html
"""
def __init__(self, intensity_min, intensity_max):
"""
Args:
intensity_min (float): Minimum augmentation (1 preserves input).
intensity_max (float): Maximum augmentation (1 preserves input).
"""
super().__init__()
self._init(locals())
def get_transform(self, image):
assert image.shape[-1] == 3, "RandomSaturation only works on RGB images"
w = np.random.uniform(self.intensity_min, self.intensity_max)
grayscale = image.dot([0.299, 0.587, 0.114])[:, :, np.newaxis]
return BlendTransform(src_image=grayscale, src_weight=1 - w, dst_weight=w)
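# Note (not from the original source): the three color augmentations above are
# the same linear blend, output = (1 - w) * src_image + w * input, where
# src_image is the image mean (contrast), zero (brightness), or a grayscale
# copy of the input (saturation).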
class RandomLighting(Augmentation):
"""
The "lighting" augmentation described in AlexNet, using fixed PCA over ImageNet.
Input images are assumed to have 'RGB' channel order.
The degree of color jittering is randomly sampled via a normal distribution,
with standard deviation given by the scale parameter.
"""
def __init__(self, scale):
"""
Args:
scale (float): Standard deviation of principal component weighting.
"""
super().__init__()
self._init(locals())
self.eigen_vecs = np.array(
[[-0.5675, 0.7192, 0.4009], [-0.5808, -0.0045, -0.8140], [-0.5836, -0.6948, 0.4203]]
)
self.eigen_vals = np.array([0.2175, 0.0188, 0.0045])
def get_transform(self, image):
assert image.shape[-1] == 3, "RandomLighting only works on RGB images"
weights = np.random.normal(scale=self.scale, size=3)
return BlendTransform(
src_image=self.eigen_vecs.dot(weights * self.eigen_vals), src_weight=1.0, dst_weight=1.0
)
|
banmo-main
|
third_party/detectron2_old/detectron2/data/transforms/augmentation_impl.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
import inspect
import numpy as np
import pprint
from typing import Any, List, Optional, Tuple, Union
from fvcore.transforms.transform import Transform, TransformList
"""
See "Data Augmentation" tutorial for an overview of the system:
https://detectron2.readthedocs.io/tutorials/augmentation.html
"""
__all__ = [
"Augmentation",
"AugmentationList",
"AugInput",
"TransformGen",
"apply_transform_gens",
"StandardAugInput",
"apply_augmentations",
]
def _check_img_dtype(img):
assert isinstance(img, np.ndarray), "[Augmentation] Needs a numpy array, but got a {}!".format(
type(img)
)
# np.issubdtype handles dtype objects; isinstance(img.dtype, np.integer) is always False
assert not np.issubdtype(img.dtype, np.integer) or (
img.dtype == np.uint8
), "[Augmentation] Got image of type {}, use uint8 or floating points instead!".format(
img.dtype
)
assert img.ndim in [2, 3], img.ndim
def _get_aug_input_args(aug, aug_input) -> List[Any]:
"""
Get the arguments to be passed to ``aug.get_transform`` from the input ``aug_input``.
"""
if aug.input_args is None:
# Decide what attributes are needed automatically
prms = list(inspect.signature(aug.get_transform).parameters.items())
# The default behavior is: if there is one parameter, then it's "image"
# (work automatically for majority of use cases, and also avoid BC breaking),
# Otherwise, use the argument names.
if len(prms) == 1:
names = ("image",)
else:
names = []
for name, prm in prms:
if prm.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
raise TypeError(
f""" \
The default implementation of `{type(aug)}.__call__` does not allow \
`{type(aug)}.get_transform` to use variable-length arguments (*args, **kwargs)! \
If arguments are unknown, reimplement `__call__` instead. \
"""
)
names.append(name)
aug.input_args = tuple(names)
args = []
for f in aug.input_args:
try:
args.append(getattr(aug_input, f))
except AttributeError as e:
raise AttributeError(
f"{type(aug)}.get_transform needs input attribute '{f}', "
f"but it is not an attribute of {type(aug_input)}!"
) from e
return args
class Augmentation:
"""
Augmentation defines (often random) policies/strategies to generate :class:`Transform`
from data. It is often used for pre-processing of input data.
A "policy" that generates a :class:`Transform` may, in the most general case,
need arbitrary information from input data in order to determine what transforms
to apply. Therefore, each :class:`Augmentation` instance defines the arguments
needed by its :meth:`get_transform` method. When called with the positional arguments,
the :meth:`get_transform` method executes the policy.
Note that :class:`Augmentation` defines the policies to create a :class:`Transform`,
but not how to execute the actual transform operations to those data.
Its :meth:`__call__` method will use :meth:`AugInput.transform` to execute the transform.
The returned `Transform` object is meant to describe a deterministic transformation, which means
it can be re-applied on associated data, e.g. the geometry of an image and its segmentation
masks need to be transformed together.
(If such re-application is not needed, then determinism is not a crucial requirement.)
"""
input_args: Optional[Tuple[str]] = None
"""
Stores the attribute names needed by :meth:`get_transform`, e.g. ``("image", "sem_seg")``.
By default, it is just a tuple of argument names in :meth:`self.get_transform`, which often only
contain "image". As long as the argument name convention is followed, there is no need for
users to touch this attribute.
"""
def _init(self, params=None):
if params:
for k, v in params.items():
if k != "self" and not k.startswith("_"):
setattr(self, k, v)
def get_transform(self, *args) -> Transform:
"""
Execute the policy based on input data, and decide what transform to apply to inputs.
Args:
args: Any fixed-length positional arguments. By default, the name of the arguments
should exist in the :class:`AugInput` to be used.
Returns:
Transform: Returns the deterministic transform to apply to the input.
Examples:
::
class MyAug:
# if a policy needs to know both image and semantic segmentation
def get_transform(self, image, sem_seg) -> T.Transform:
pass
tfm: Transform = MyAug().get_transform(image, sem_seg)
new_image = tfm.apply_image(image)
Notes:
Users can freely use arbitrary new argument names in custom
:meth:`get_transform` method, as long as they are available in the
input data. In detectron2 we use the following convention:
* image: (H,W) or (H,W,C) ndarray of type uint8 in range [0, 255], or
floating point in range [0, 1] or [0, 255].
* boxes: (N,4) ndarray of float32. It represents the instance bounding boxes
of N instances. Each is in XYXY format in unit of absolute coordinates.
* sem_seg: (H,W) ndarray of type uint8. Each element is an integer label of pixel.
We do not specify convention for other types and do not include builtin
:class:`Augmentation` that uses other types in detectron2.
"""
raise NotImplementedError
def __call__(self, aug_input) -> Transform:
"""
Augment the given `aug_input` **in-place**, and return the transform that's used.
This method will be called to apply the augmentation. In most augmentation, it
is enough to use the default implementation, which calls :meth:`get_transform`
using the inputs. But a subclass can overwrite it to have more complicated logic.
Args:
aug_input (AugInput): an object that has attributes needed by this augmentation
(defined by ``self.get_transform``). Its ``transform`` method will be called
to in-place transform it.
Returns:
Transform: the transform that is applied on the input.
"""
args = _get_aug_input_args(self, aug_input)
tfm = self.get_transform(*args)
assert isinstance(tfm, (Transform, TransformList)), (
f"{type(self)}.get_transform must return an instance of Transform! "
"Got {type(tfm)} instead."
)
aug_input.transform(tfm)
return tfm
def _rand_range(self, low=1.0, high=None, size=None):
"""
Uniform float random number between low and high.
"""
if high is None:
low, high = 0, low
if size is None:
size = []
return np.random.uniform(low, high, size)
def __repr__(self):
"""
Produce something like:
"MyAugmentation(field1={self.field1}, field2={self.field2})"
"""
try:
sig = inspect.signature(self.__init__)
classname = type(self).__name__
argstr = []
for name, param in sig.parameters.items():
assert (
param.kind != param.VAR_POSITIONAL and param.kind != param.VAR_KEYWORD
), "The default __repr__ doesn't support *args or **kwargs"
assert hasattr(self, name), (
"Attribute {} not found! "
"Default __repr__ only works if attributes match the constructor.".format(name)
)
attr = getattr(self, name)
default = param.default
if default is attr:
continue
attr_str = pprint.pformat(attr)
if "\n" in attr_str:
# don't show it if pformat decides to use >1 lines
attr_str = "..."
argstr.append("{}={}".format(name, attr_str))
return "{}({})".format(classname, ", ".join(argstr))
except AssertionError:
return super().__repr__()
__str__ = __repr__
def _transform_to_aug(tfm_or_aug):
"""
Wrap Transform into Augmentation.
Private, used internally to implement augmentations.
"""
assert isinstance(tfm_or_aug, (Transform, Augmentation)), tfm_or_aug
if isinstance(tfm_or_aug, Augmentation):
return tfm_or_aug
else:
class _TransformToAug(Augmentation):
def __init__(self, tfm: Transform):
self.tfm = tfm
def get_transform(self, *args):
return self.tfm
def __repr__(self):
return repr(self.tfm)
__str__ = __repr__
return _TransformToAug(tfm_or_aug)
class AugmentationList(Augmentation):
"""
Apply a sequence of augmentations.
It has ``__call__`` method to apply the augmentations.
Note that calling :meth:`get_transform` on an :class:`AugmentationList` is not
supported (it will raise an error), because in order to apply a sequence of
augmentations, the kth augmentation must be applied first to provide the inputs
needed by the (k+1)th augmentation.
"""
def __init__(self, augs):
"""
Args:
augs (list[Augmentation or Transform]):
"""
super().__init__()
self.augs = [_transform_to_aug(x) for x in augs]
def __call__(self, aug_input) -> Transform:
tfms = []
for x in self.augs:
tfm = x(aug_input)
tfms.append(tfm)
return TransformList(tfms)
def __repr__(self):
msgs = [str(x) for x in self.augs]
return "AugmentationList[{}]".format(", ".join(msgs))
__str__ = __repr__
class AugInput:
"""
Input that can be used with :meth:`Augmentation.__call__`.
This is a standard implementation for the majority of use cases.
This class provides the standard attributes **"image", "boxes", "sem_seg"**
defined in :meth:`__init__` and they may be needed by different augmentations.
Most augmentation policies do not need attributes beyond these three.
After applying augmentations to these attributes (using :meth:`AugInput.transform`),
the returned transforms can then be used to transform other data structures that users have.
Examples:
::
input = AugInput(image, boxes=boxes)
tfms = augmentation(input)
transformed_image = input.image
transformed_boxes = input.boxes
transformed_other_data = tfms.apply_other(other_data)
An extended project that works with new data types may implement augmentation policies
that need other inputs. An algorithm may need to transform inputs in a way different
from the standard approach defined in this class. In those rare situations, users can
implement a class similar to this one that satisfies the following conditions:
* The input must provide access to these data in the form of attribute access
(``getattr``). For example, if an :class:`Augmentation` to be applied needs "image"
and "sem_seg" arguments, its input must have the attribute "image" and "sem_seg".
* The input must have a ``transform(tfm: Transform) -> None`` method which
in-place transforms all its attributes.
"""
# TODO maybe should support more builtin data types here
def __init__(
self,
image: np.ndarray,
*,
boxes: Optional[np.ndarray] = None,
sem_seg: Optional[np.ndarray] = None,
):
"""
Args:
image (ndarray): (H,W) or (H,W,C) ndarray of type uint8 in range [0, 255], or
floating point in range [0, 1] or [0, 255]. The meaning of C is up
to users.
boxes (ndarray or None): Nx4 float32 boxes in XYXY_ABS mode
sem_seg (ndarray or None): HxW uint8 semantic segmentation mask. Each element
is an integer label of pixel.
"""
_check_img_dtype(image)
self.image = image
self.boxes = boxes
self.sem_seg = sem_seg
def transform(self, tfm: Transform) -> None:
"""
In-place transform all attributes of this class.
By "in-place", it means after calling this method, accessing an attribute such
as ``self.image`` will return transformed data.
"""
self.image = tfm.apply_image(self.image)
if self.boxes is not None:
self.boxes = tfm.apply_box(self.boxes)
if self.sem_seg is not None:
self.sem_seg = tfm.apply_segmentation(self.sem_seg)
def apply_augmentations(
self, augmentations: List[Union[Augmentation, Transform]]
) -> TransformList:
"""
Equivalent of ``AugmentationList(augmentations)(self)``
"""
return AugmentationList(augmentations)(self)
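# Minimal sketch (not from the original source) of a custom input satisfying the
# two conditions above; the class and attribute names are hypothetical.
class _ExampleCoordsAugInput:
    def __init__(self, image: np.ndarray, points: np.ndarray):
        _check_img_dtype(image)
        self.image = image    # read by augmentations whose get_transform asks for "image"
        self.points = points  # (N, 2) float array of (x, y) coordinates

    def transform(self, tfm: Transform) -> None:
        # In-place transform of all attributes, mirroring AugInput.transform above
        self.image = tfm.apply_image(self.image)
        self.points = tfm.apply_coords(self.points)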
def apply_augmentations(augmentations: List[Union[Transform, Augmentation]], inputs):
"""
Use ``T.AugmentationList(augmentations)(inputs)`` instead.
"""
if isinstance(inputs, np.ndarray):
# handle the common case of image-only Augmentation, also for backward compatibility
image_only = True
inputs = AugInput(inputs)
else:
image_only = False
tfms = inputs.apply_augmentations(augmentations)
return inputs.image if image_only else inputs, tfms
apply_transform_gens = apply_augmentations
"""
Alias for backward-compatibility.
"""
TransformGen = Augmentation
"""
Alias for Augmentation, since it is something that generates :class:`Transform`s
"""
StandardAugInput = AugInput
"""
Alias for compatibility. It's not worth the complexity to have two classes.
"""
|
banmo-main
|
third_party/detectron2_old/detectron2/data/transforms/augmentation.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
from fvcore.transforms.transform import Transform, TransformList # order them first
from fvcore.transforms.transform import *
from .transform import *
from .augmentation import *
from .augmentation_impl import *
__all__ = [k for k in globals().keys() if not k.startswith("_")]
from detectron2.utils.env import fixup_module_metadata
fixup_module_metadata(__name__, globals(), __all__)
del fixup_module_metadata
|
banmo-main
|
third_party/detectron2_old/detectron2/data/transforms/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
"""
See "Data Augmentation" tutorial for an overview of the system:
https://detectron2.readthedocs.io/tutorials/augmentation.html
"""
import numpy as np
import torch
import torch.nn.functional as F
from fvcore.transforms.transform import (
CropTransform,
HFlipTransform,
NoOpTransform,
Transform,
TransformList,
)
from PIL import Image
try:
import cv2 # noqa
except ImportError:
# OpenCV is an optional dependency at the moment
pass
__all__ = [
"ExtentTransform",
"ResizeTransform",
"RotationTransform",
"ColorTransform",
"PILColorTransform",
]
class ExtentTransform(Transform):
"""
Extracts a subregion from the source image and scales it to the output size.
The fill color is used for pixels of the source rect that fall outside
the source image.
See: https://pillow.readthedocs.io/en/latest/PIL.html#PIL.ImageTransform.ExtentTransform
"""
def __init__(self, src_rect, output_size, interp=Image.BILINEAR, fill=0):
"""
Args:
src_rect (x0, y0, x1, y1): src coordinates
output_size (h, w): dst image size
interp: PIL interpolation methods
fill: Fill color used when src_rect extends outside image
"""
super().__init__()
self._set_attributes(locals())
def apply_image(self, img, interp=None):
h, w = self.output_size
if len(img.shape) > 2 and img.shape[2] == 1:
pil_image = Image.fromarray(img[:, :, 0], mode="L")
else:
pil_image = Image.fromarray(img)
pil_image = pil_image.transform(
size=(w, h),
method=Image.EXTENT,
data=self.src_rect,
resample=interp if interp else self.interp,
fill=self.fill,
)
ret = np.asarray(pil_image)
if len(img.shape) > 2 and img.shape[2] == 1:
ret = np.expand_dims(ret, -1)
return ret
def apply_coords(self, coords):
# Transform image center from source coordinates into output coordinates
# and then map the new origin to the corner of the output image.
h, w = self.output_size
x0, y0, x1, y1 = self.src_rect
new_coords = coords.astype(np.float32)
new_coords[:, 0] -= 0.5 * (x0 + x1)
new_coords[:, 1] -= 0.5 * (y0 + y1)
new_coords[:, 0] *= w / (x1 - x0)
new_coords[:, 1] *= h / (y1 - y0)
new_coords[:, 0] += 0.5 * w
new_coords[:, 1] += 0.5 * h
return new_coords
def apply_segmentation(self, segmentation):
segmentation = self.apply_image(segmentation, interp=Image.NEAREST)
return segmentation
class ResizeTransform(Transform):
"""
Resize the image to a target size.
"""
def __init__(self, h, w, new_h, new_w, interp=None):
"""
Args:
h, w (int): original image size
new_h, new_w (int): new image size
interp: PIL interpolation methods, defaults to bilinear.
"""
# TODO decide on PIL vs opencv
super().__init__()
if interp is None:
interp = Image.BILINEAR
self._set_attributes(locals())
def apply_image(self, img, interp=None):
assert img.shape[:2] == (self.h, self.w)
assert len(img.shape) <= 4
interp_method = interp if interp is not None else self.interp
if img.dtype == np.uint8:
if len(img.shape) > 2 and img.shape[2] == 1:
pil_image = Image.fromarray(img[:, :, 0], mode="L")
else:
pil_image = Image.fromarray(img)
pil_image = pil_image.resize((self.new_w, self.new_h), interp_method)
ret = np.asarray(pil_image)
if len(img.shape) > 2 and img.shape[2] == 1:
ret = np.expand_dims(ret, -1)
else:
# PIL only supports uint8
if any(x < 0 for x in img.strides):
img = np.ascontiguousarray(img)
img = torch.from_numpy(img)
shape = list(img.shape)
shape_4d = shape[:2] + [1] * (4 - len(shape)) + shape[2:]
img = img.view(shape_4d).permute(2, 3, 0, 1) # hw(c) -> nchw
_PIL_RESIZE_TO_INTERPOLATE_MODE = {
Image.NEAREST: "nearest",
Image.BILINEAR: "bilinear",
Image.BICUBIC: "bicubic",
}
mode = _PIL_RESIZE_TO_INTERPOLATE_MODE[interp_method]
align_corners = None if mode == "nearest" else False
img = F.interpolate(
img, (self.new_h, self.new_w), mode=mode, align_corners=align_corners
)
shape[:2] = (self.new_h, self.new_w)
ret = img.permute(2, 3, 0, 1).view(shape).numpy() # nchw -> hw(c)
return ret
def apply_coords(self, coords):
coords[:, 0] = coords[:, 0] * (self.new_w * 1.0 / self.w)
coords[:, 1] = coords[:, 1] * (self.new_h * 1.0 / self.h)
return coords
def apply_segmentation(self, segmentation):
segmentation = self.apply_image(segmentation, interp=Image.NEAREST)
return segmentation
def inverse(self):
return ResizeTransform(self.new_h, self.new_w, self.h, self.w, self.interp)
class RotationTransform(Transform):
"""
Rotate the image the given number of degrees counter clockwise
around its center.
"""
def __init__(self, h, w, angle, expand=True, center=None, interp=None):
"""
Args:
h, w (int): original image size
angle (float): degrees for rotation
expand (bool): choose if the image should be resized to fit the whole
rotated image (default), or simply cropped
center (tuple (width, height)): coordinates of the rotation center
if left to None, the center will be fit to the center of each image
center has no effect if expand=True because it only affects shifting
interp: cv2 interpolation method, default cv2.INTER_LINEAR
"""
super().__init__()
image_center = np.array((w / 2, h / 2))
if center is None:
center = image_center
if interp is None:
interp = cv2.INTER_LINEAR
abs_cos, abs_sin = (abs(np.cos(np.deg2rad(angle))), abs(np.sin(np.deg2rad(angle))))
if expand:
# find the new width and height bounds
bound_w, bound_h = np.rint(
[h * abs_sin + w * abs_cos, h * abs_cos + w * abs_sin]
).astype(int)
else:
bound_w, bound_h = w, h
self._set_attributes(locals())
self.rm_coords = self.create_rotation_matrix()
# Needed because of this problem https://github.com/opencv/opencv/issues/11784
self.rm_image = self.create_rotation_matrix(offset=-0.5)
def apply_image(self, img, interp=None):
"""
img should be a numpy array, formatted as Height * Width * Nchannels
"""
if len(img) == 0 or self.angle % 360 == 0:
return img
assert img.shape[:2] == (self.h, self.w)
interp = interp if interp is not None else self.interp
return cv2.warpAffine(img, self.rm_image, (self.bound_w, self.bound_h), flags=interp)
def apply_coords(self, coords):
"""
coords should be a N * 2 array-like, containing N couples of (x, y) points
"""
coords = np.asarray(coords, dtype=float)
if len(coords) == 0 or self.angle % 360 == 0:
return coords
return cv2.transform(coords[:, np.newaxis, :], self.rm_coords)[:, 0, :]
def apply_segmentation(self, segmentation):
segmentation = self.apply_image(segmentation, interp=cv2.INTER_NEAREST)
return segmentation
def create_rotation_matrix(self, offset=0):
center = (self.center[0] + offset, self.center[1] + offset)
rm = cv2.getRotationMatrix2D(tuple(center), self.angle, 1)
if self.expand:
# Find the coordinates of the center of rotation in the new image
# The only point for which we know the future coordinates is the center of the image
rot_im_center = cv2.transform(self.image_center[None, None, :] + offset, rm)[0, 0, :]
new_center = np.array([self.bound_w / 2, self.bound_h / 2]) + offset - rot_im_center
# shift the rotation center to the new coordinates
rm[:, 2] += new_center
return rm
def inverse(self):
"""
The inverse is to rotate it back with expand, and crop to get the original shape.
"""
if not self.expand: # Not possible to inverse if a part of the image is lost
raise NotImplementedError()
rotation = RotationTransform(
self.bound_h, self.bound_w, -self.angle, True, None, self.interp
)
crop = CropTransform(
(rotation.bound_w - self.w) // 2, (rotation.bound_h - self.h) // 2, self.w, self.h
)
return TransformList([rotation, crop])
class ColorTransform(Transform):
"""
Generic wrapper for any photometric transforms.
These transformations should only affect the color space and
not the coordinate space of the image (e.g. annotation
coordinates such as bounding boxes should not be changed)
"""
def __init__(self, op):
"""
Args:
op (Callable): operation to be applied to the image,
which takes in an ndarray and returns an ndarray.
"""
if not callable(op):
raise ValueError("op parameter should be callable")
super().__init__()
self._set_attributes(locals())
def apply_image(self, img):
return self.op(img)
def apply_coords(self, coords):
return coords
def inverse(self):
return NoOpTransform()
def apply_segmentation(self, segmentation):
return segmentation
class PILColorTransform(ColorTransform):
"""
Generic wrapper for PIL Photometric image transforms,
which affect the color space and not the coordinate
space of the image
"""
def __init__(self, op):
"""
Args:
op (Callable): operation to be applied to the image,
which takes in a PIL Image and returns a transformed
PIL Image.
For reference on possible operations see:
- https://pillow.readthedocs.io/en/stable/
"""
if not callable(op):
raise ValueError("op parameter should be callable")
super().__init__(op)
def apply_image(self, img):
img = Image.fromarray(img)
return np.asarray(super().apply_image(img))
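# Usage sketch (not from the original source): wrapping a Pillow enhancement op.
# The factor 1.5 here is an arbitrary illustration.
#
#   from PIL import ImageEnhance
#   tfm = PILColorTransform(lambda img: ImageEnhance.Sharpness(img).enhance(1.5))
#   new_image = tfm.apply_image(image)  # boxes/segmentation are left untouched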
def HFlip_rotated_box(transform, rotated_boxes):
"""
Apply the horizontal flip transform on rotated boxes.
Args:
transform (HFlipTransform): the flip transform being applied.
rotated_boxes (ndarray): Nx5 floating point array of
(x_center, y_center, width, height, angle_degrees) format
in absolute coordinates.
"""
# Transform x_center
rotated_boxes[:, 0] = transform.width - rotated_boxes[:, 0]
# Transform angle
rotated_boxes[:, 4] = -rotated_boxes[:, 4]
return rotated_boxes
def Resize_rotated_box(transform, rotated_boxes):
"""
Apply the resizing transform on rotated boxes. For details of how these (approximation)
formulas are derived, please refer to :meth:`RotatedBoxes.scale`.
Args:
transform (ResizeTransform): the resize transform being applied.
rotated_boxes (ndarray): Nx5 floating point array of
(x_center, y_center, width, height, angle_degrees) format
in absolute coordinates.
"""
scale_factor_x = transform.new_w * 1.0 / transform.w
scale_factor_y = transform.new_h * 1.0 / transform.h
rotated_boxes[:, 0] *= scale_factor_x
rotated_boxes[:, 1] *= scale_factor_y
theta = rotated_boxes[:, 4] * np.pi / 180.0
c = np.cos(theta)
s = np.sin(theta)
rotated_boxes[:, 2] *= np.sqrt(np.square(scale_factor_x * c) + np.square(scale_factor_y * s))
rotated_boxes[:, 3] *= np.sqrt(np.square(scale_factor_x * s) + np.square(scale_factor_y * c))
rotated_boxes[:, 4] = np.arctan2(scale_factor_x * s, scale_factor_y * c) * 180 / np.pi
return rotated_boxes
HFlipTransform.register_type("rotated_box", HFlip_rotated_box)
ResizeTransform.register_type("rotated_box", Resize_rotated_box)
# not necessary any more with latest fvcore
NoOpTransform.register_type("rotated_box", lambda t, x: x)
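# Sketch (not from the original source): `register_type` (from fvcore) attaches a
# handler for a new data type to a Transform class, exposing an `apply_<type>` method:
#
#   hflip = HFlipTransform(width=640)
#   flipped = hflip.apply_rotated_box(rotated_boxes)  # dispatches to HFlip_rotated_box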
|
banmo-main
|
third_party/detectron2_old/detectron2/data/transforms/transform.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
from .distributed_sampler import InferenceSampler, RepeatFactorTrainingSampler, TrainingSampler
from .grouped_batch_sampler import GroupedBatchSampler
__all__ = [
"GroupedBatchSampler",
"TrainingSampler",
"InferenceSampler",
"RepeatFactorTrainingSampler",
]
|
banmo-main
|
third_party/detectron2_old/detectron2/data/samplers/__init__.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
import numpy as np
from torch.utils.data.sampler import BatchSampler, Sampler
class GroupedBatchSampler(BatchSampler):
"""
Wraps another sampler to yield a mini-batch of indices.
It enforces that the batch only contain elements from the same group.
It also tries to provide mini-batches that follow an ordering as close as
possible to that of the original sampler.
"""
def __init__(self, sampler, group_ids, batch_size):
"""
Args:
sampler (Sampler): Base sampler.
group_ids (list[int]): If the sampler produces indices in range [0, N),
`group_ids` must be a list of `N` ints which contains the group id of each sample.
The group ids must be a set of integers in the range [0, num_groups).
batch_size (int): Size of mini-batch.
"""
if not isinstance(sampler, Sampler):
raise ValueError(
"sampler should be an instance of "
"torch.utils.data.Sampler, but got sampler={}".format(sampler)
)
self.sampler = sampler
self.group_ids = np.asarray(group_ids)
assert self.group_ids.ndim == 1
self.batch_size = batch_size
groups = np.unique(self.group_ids).tolist()
# buffer the indices of each group until batch size is reached
self.buffer_per_group = {k: [] for k in groups}
def __iter__(self):
for idx in self.sampler:
group_id = self.group_ids[idx]
group_buffer = self.buffer_per_group[group_id]
group_buffer.append(idx)
if len(group_buffer) == self.batch_size:
yield group_buffer[:] # yield a copy of the list
del group_buffer[:]
def __len__(self):
raise NotImplementedError("len() of GroupedBatchSampler is not well-defined.")
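# Usage sketch (not from the original source): two aspect-ratio groups, batch_size=2.
#
#   from torch.utils.data.sampler import SequentialSampler
#   sampler = GroupedBatchSampler(SequentialSampler(range(6)), [0, 1, 0, 0, 1, 1], 2)
#   list(sampler)  # [[0, 2], [1, 4]]; leftovers 3 and 5 never fill a batch and are dropped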
|
banmo-main
|
third_party/detectron2_old/detectron2/data/samplers/grouped_batch_sampler.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
import itertools
import math
from collections import defaultdict
from typing import Optional
import torch
from torch.utils.data.sampler import Sampler
from detectron2.utils import comm
class TrainingSampler(Sampler):
"""
In training, we only care about the "infinite stream" of training data.
So this sampler produces an infinite stream of indices and
all workers cooperate to correctly shuffle the indices and sample different indices.
The samplers in each worker effectively produce `indices[worker_id::num_workers]`
where `indices` is an infinite stream of indices consisting of
`shuffle(range(size)) + shuffle(range(size)) + ...` (if shuffle is True)
or `range(size) + range(size) + ...` (if shuffle is False)
"""
def __init__(self, size: int, shuffle: bool = True, seed: Optional[int] = None):
"""
Args:
size (int): the total number of data of the underlying dataset to sample from
shuffle (bool): whether to shuffle the indices or not
seed (int): the initial seed of the shuffle. Must be the same
across all workers. If None, will use a random seed shared
among workers (require synchronization among all workers).
"""
self._size = size
assert size > 0
self._shuffle = shuffle
if seed is None:
seed = comm.shared_random_seed()
self._seed = int(seed)
self._rank = comm.get_rank()
self._world_size = comm.get_world_size()
def __iter__(self):
start = self._rank
yield from itertools.islice(self._infinite_indices(), start, None, self._world_size)
def _infinite_indices(self):
g = torch.Generator()
g.manual_seed(self._seed)
while True:
if self._shuffle:
yield from torch.randperm(self._size, generator=g).tolist()
else:
yield from torch.arange(self._size).tolist()
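# Behavior sketch (not from the original source): with world_size=2 and
# shuffle=False, rank 0 consumes elements 0, 2, 4, ... of the infinite stream
# range(size) + range(size) + ..., and rank 1 consumes 1, 3, 5, ...; sharing the
# seed keeps shuffled streams identical across workers, so the ranks partition
# each shuffled pass without overlap.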
class RepeatFactorTrainingSampler(Sampler):
"""
Similar to TrainingSampler, but a sample may appear more often than others based
on its "repeat factor". This is suitable for training on class-imbalanced datasets like LVIS.
"""
def __init__(self, repeat_factors, *, shuffle=True, seed=None):
"""
Args:
repeat_factors (Tensor): a float vector, the repeat factor for each index. When it's
full of ones, it is equivalent to ``TrainingSampler(len(repeat_factors), ...)``.
shuffle (bool): whether to shuffle the indices or not
seed (int): the initial seed of the shuffle. Must be the same
across all workers. If None, will use a random seed shared
among workers (require synchronization among all workers).
"""
self._shuffle = shuffle
if seed is None:
seed = comm.shared_random_seed()
self._seed = int(seed)
self._rank = comm.get_rank()
self._world_size = comm.get_world_size()
# Split into whole number (_int_part) and fractional (_frac_part) parts.
self._int_part = torch.trunc(repeat_factors)
self._frac_part = repeat_factors - self._int_part
@staticmethod
def repeat_factors_from_category_frequency(dataset_dicts, repeat_thresh):
"""
Compute (fractional) per-image repeat factors based on category frequency.
The repeat factor for an image is a function of the frequency of the rarest
category labeled in that image. The "frequency of category c" in [0, 1] is defined
as the fraction of images in the training set (without repeats) in which category c
appears.
See :paper:`lvis` (>= v2) Appendix B.2.
Args:
dataset_dicts (list[dict]): annotations in Detectron2 dataset format.
repeat_thresh (float): frequency threshold below which data is repeated.
If the frequency is half of `repeat_thresh`, the image will be
repeated twice.
Returns:
torch.Tensor:
the i-th element is the repeat factor for the dataset image at index i.
"""
# 1. For each category c, compute the fraction of images that contain it: f(c)
category_freq = defaultdict(int)
for dataset_dict in dataset_dicts: # For each image (without repeats)
cat_ids = {ann["category_id"] for ann in dataset_dict["annotations"]}
for cat_id in cat_ids:
category_freq[cat_id] += 1
num_images = len(dataset_dicts)
for k, v in category_freq.items():
category_freq[k] = v / num_images
# 2. For each category c, compute the category-level repeat factor:
# r(c) = max(1, sqrt(t / f(c)))
category_rep = {
cat_id: max(1.0, math.sqrt(repeat_thresh / cat_freq))
for cat_id, cat_freq in category_freq.items()
}
# 3. For each image I, compute the image-level repeat factor:
# r(I) = max_{c in I} r(c)
rep_factors = []
for dataset_dict in dataset_dicts:
cat_ids = {ann["category_id"] for ann in dataset_dict["annotations"]}
rep_factor = max({category_rep[cat_id] for cat_id in cat_ids}, default=1.0)
rep_factors.append(rep_factor)
return torch.tensor(rep_factors, dtype=torch.float32)
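# Worked example (not from the original source): with repeat_thresh t = 0.01, a
# category appearing in 0.25% of images gets r(c) = max(1, sqrt(0.01 / 0.0025)) = 2,
# so images containing it are repeated twice on average; categories with
# f(c) >= t keep r(c) = 1 and add no repeats.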
def _get_epoch_indices(self, generator):
"""
Create a list of dataset indices (with repeats) to use for one epoch.
Args:
generator (torch.Generator): pseudo random number generator used for
stochastic rounding.
Returns:
torch.Tensor: list of dataset indices to use in one epoch. Each index
is repeated based on its calculated repeat factor.
"""
# Since repeat factors are fractional, we use stochastic rounding so
# that the target repeat factor is achieved in expectation over the
# course of training
rands = torch.rand(len(self._frac_part), generator=generator)
rep_factors = self._int_part + (rands < self._frac_part).float()
# Construct a list of indices in which we repeat images as specified
indices = []
for dataset_index, rep_factor in enumerate(rep_factors):
indices.extend([dataset_index] * int(rep_factor.item()))
return torch.tensor(indices, dtype=torch.int64)
def __iter__(self):
start = self._rank
yield from itertools.islice(self._infinite_indices(), start, None, self._world_size)
def _infinite_indices(self):
g = torch.Generator()
g.manual_seed(self._seed)
while True:
# Sample indices with repeats determined by stochastic rounding; each
# "epoch" may have a slightly different size due to the rounding.
indices = self._get_epoch_indices(g)
if self._shuffle:
randperm = torch.randperm(len(indices), generator=g)
yield from indices[randperm].tolist()
else:
yield from indices.tolist()
class InferenceSampler(Sampler):
"""
Produce indices for inference across all workers.
Inference needs to run on the __exact__ set of samples,
therefore when the total number of samples is not divisible by the number of workers,
this sampler produces a different number of samples on different workers.
"""
def __init__(self, size: int):
"""
Args:
size (int): the total number of data of the underlying dataset to sample from
"""
self._size = size
assert size > 0
self._rank = comm.get_rank()
self._world_size = comm.get_world_size()
shard_size = (self._size - 1) // self._world_size + 1
begin = shard_size * self._rank
end = min(shard_size * (self._rank + 1), self._size)
self._local_indices = range(begin, end)
def __iter__(self):
yield from self._local_indices
def __len__(self):
return len(self._local_indices)
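# Worked example (not from the original source): size=10 and world_size=4 give
# shard_size = (10 - 1) // 4 + 1 = 3, so ranks receive ranges [0, 3), [3, 6),
# [6, 9), [9, 10); the last worker gets fewer samples, but each sample is
# evaluated exactly once.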
|
banmo-main
|
third_party/detectron2_old/detectron2/data/samplers/distributed_sampler.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
import datetime
import itertools
import logging
import os
import tempfile
import time
from collections import Counter
import torch
from fvcore.common.checkpoint import PeriodicCheckpointer as _PeriodicCheckpointer
from fvcore.common.param_scheduler import ParamScheduler
from fvcore.common.timer import Timer
from fvcore.nn.precise_bn import get_bn_modules, update_bn_stats
import detectron2.utils.comm as comm
from detectron2.evaluation.testing import flatten_results_dict
from detectron2.solver import LRMultiplier
from detectron2.utils.events import EventStorage, EventWriter
from detectron2.utils.file_io import PathManager
from .train_loop import HookBase
__all__ = [
"CallbackHook",
"IterationTimer",
"PeriodicWriter",
"PeriodicCheckpointer",
"LRScheduler",
"AutogradProfiler",
"EvalHook",
"PreciseBN",
]
"""
Implement some common hooks.
"""
class CallbackHook(HookBase):
"""
Create a hook using callback functions provided by the user.
"""
def __init__(self, *, before_train=None, after_train=None, before_step=None, after_step=None):
"""
Each argument is a function that takes one argument: the trainer.
"""
self._before_train = before_train
self._before_step = before_step
self._after_step = after_step
self._after_train = after_train
def before_train(self):
if self._before_train:
self._before_train(self.trainer)
def after_train(self):
if self._after_train:
self._after_train(self.trainer)
# The functions may be closures that hold reference to the trainer
# Therefore, delete them to avoid circular reference.
del self._before_train, self._after_train
del self._before_step, self._after_step
def before_step(self):
if self._before_step:
self._before_step(self.trainer)
def after_step(self):
if self._after_step:
self._after_step(self.trainer)
class IterationTimer(HookBase):
"""
Track the time spent for each iteration (each run_step call in the trainer).
Print a summary at the end of training.
This hook uses the time between the call to its :meth:`before_step`
and :meth:`after_step` methods.
Under the convention that :meth:`before_step` of all hooks should only
take a negligible amount of time, the :class:`IterationTimer` hook should be
placed at the beginning of the list of hooks to obtain accurate timing.
"""
def __init__(self, warmup_iter=3):
"""
Args:
warmup_iter (int): the number of iterations at the beginning to exclude
from timing.
"""
self._warmup_iter = warmup_iter
self._step_timer = Timer()
self._start_time = time.perf_counter()
self._total_timer = Timer()
def before_train(self):
self._start_time = time.perf_counter()
self._total_timer.reset()
self._total_timer.pause()
def after_train(self):
logger = logging.getLogger(__name__)
total_time = time.perf_counter() - self._start_time
total_time_minus_hooks = self._total_timer.seconds()
hook_time = total_time - total_time_minus_hooks
num_iter = self.trainer.iter + 1 - self.trainer.start_iter - self._warmup_iter
if num_iter > 0 and total_time_minus_hooks > 0:
# Speed is meaningful only after warmup
# NOTE this format is parsed by grep in some scripts
logger.info(
"Overall training speed: {} iterations in {} ({:.4f} s / it)".format(
num_iter,
str(datetime.timedelta(seconds=int(total_time_minus_hooks))),
total_time_minus_hooks / num_iter,
)
)
logger.info(
"Total training time: {} ({} on hooks)".format(
str(datetime.timedelta(seconds=int(total_time))),
str(datetime.timedelta(seconds=int(hook_time))),
)
)
def before_step(self):
self._step_timer.reset()
self._total_timer.resume()
def after_step(self):
# +1 because we're in after_step, the current step is done
# but not yet counted
iter_done = self.trainer.iter - self.trainer.start_iter + 1
if iter_done >= self._warmup_iter:
sec = self._step_timer.seconds()
self.trainer.storage.put_scalars(time=sec)
else:
self._start_time = time.perf_counter()
self._total_timer.reset()
self._total_timer.pause()
class PeriodicWriter(HookBase):
"""
Write events to EventStorage (by calling ``writer.write()``) periodically.
It is executed every ``period`` iterations and after the last iteration.
Note that ``period`` does not affect how data is smoothed by each writer.
"""
def __init__(self, writers, period=20):
"""
Args:
writers (list[EventWriter]): a list of EventWriter objects
            period (int): the period (in iterations) at which the writers are invoked.
"""
self._writers = writers
for w in writers:
assert isinstance(w, EventWriter), w
self._period = period
def after_step(self):
if (self.trainer.iter + 1) % self._period == 0 or (
self.trainer.iter == self.trainer.max_iter - 1
):
for writer in self._writers:
writer.write()
def after_train(self):
for writer in self._writers:
# If any new data is found (e.g. produced by other after_train),
# write them before closing
writer.write()
writer.close()
class PeriodicCheckpointer(_PeriodicCheckpointer, HookBase):
"""
Same as :class:`detectron2.checkpoint.PeriodicCheckpointer`, but as a hook.
Note that when used as a hook,
it is unable to save additional data other than what's defined
by the given `checkpointer`.
It is executed every ``period`` iterations and after the last iteration.
"""
def before_train(self):
self.max_iter = self.trainer.max_iter
def after_step(self):
        # As a hook, there is no way to pass extra **kwargs to the checkpointer's step()
self.step(self.trainer.iter)
class LRScheduler(HookBase):
"""
A hook which executes a torch builtin LR scheduler and summarizes the LR.
It is executed after every iteration.
"""
def __init__(self, optimizer=None, scheduler=None):
"""
Args:
optimizer (torch.optim.Optimizer):
scheduler (torch.optim.LRScheduler or fvcore.common.param_scheduler.ParamScheduler):
if a :class:`ParamScheduler` object, it defines the multiplier over the base LR
in the optimizer.
If any argument is not given, will try to obtain it from the trainer.
"""
self._optimizer = optimizer
self._scheduler = scheduler
def before_train(self):
self._optimizer = self._optimizer or self.trainer.optimizer
if isinstance(self.scheduler, ParamScheduler):
self._scheduler = LRMultiplier(
self._optimizer,
self.scheduler,
self.trainer.max_iter,
last_iter=self.trainer.iter - 1,
)
# NOTE: some heuristics on what LR to summarize
# summarize the param group with most parameters
largest_group = max(len(g["params"]) for g in self._optimizer.param_groups)
if largest_group == 1:
# If all groups have one parameter,
# then find the most common initial LR, and use it for summary
lr_count = Counter([g["lr"] for g in self._optimizer.param_groups])
lr = lr_count.most_common()[0][0]
for i, g in enumerate(self._optimizer.param_groups):
if g["lr"] == lr:
self._best_param_group_id = i
break
else:
for i, g in enumerate(self._optimizer.param_groups):
if len(g["params"]) == largest_group:
self._best_param_group_id = i
break
def after_step(self):
lr = self._optimizer.param_groups[self._best_param_group_id]["lr"]
self.trainer.storage.put_scalar("lr", lr, smoothing_hint=False)
self.scheduler.step()
@property
def scheduler(self):
return self._scheduler or self.trainer.scheduler
def state_dict(self):
if isinstance(self.scheduler, torch.optim.lr_scheduler._LRScheduler):
return self.scheduler.state_dict()
return {}
def load_state_dict(self, state_dict):
if isinstance(self.scheduler, torch.optim.lr_scheduler._LRScheduler):
logger = logging.getLogger(__name__)
logger.info("Loading scheduler from state_dict ...")
self.scheduler.load_state_dict(state_dict)
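# --- Hedged usage sketch (illustrative only): wiring LRScheduler with an
# fvcore ParamScheduler. The cosine multiplier below is a placeholder choice;
# before_train() converts it into an LRMultiplier bound to the trainer's
# optimizer and max_iter.
def _demo_lr_scheduler_hook(trainer):
    from fvcore.common.param_scheduler import CosineParamScheduler

    multiplier = CosineParamScheduler(1.0, 0.0)  # LR multiplier decays from 1 to 0
    trainer.register_hooks([LRScheduler(scheduler=multiplier)])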
class AutogradProfiler(HookBase):
"""
A hook which runs `torch.autograd.profiler.profile`.
Examples:
::
hooks.AutogradProfiler(
lambda trainer: trainer.iter > 10 and trainer.iter < 20, self.cfg.OUTPUT_DIR
)
The above example will run the profiler for iteration 10~20 and dump
results to ``OUTPUT_DIR``. We did not profile the first few iterations
because they are typically slower than the rest.
The result files can be loaded in the ``chrome://tracing`` page in chrome browser.
Note:
        When used together with NCCL on older versions of GPUs,
autograd profiler may cause deadlock because it unnecessarily allocates
memory on every device it sees. The memory management calls, if
interleaved with NCCL calls, lead to deadlock on GPUs that do not
support ``cudaLaunchCooperativeKernelMultiDevice``.
"""
def __init__(self, enable_predicate, output_dir, *, use_cuda=True):
"""
Args:
enable_predicate (callable[trainer -> bool]): a function which takes a trainer,
and returns whether to enable the profiler.
It will be called once every step, and can be used to select which steps to profile.
output_dir (str): the output directory to dump tracing files.
use_cuda (bool): same as in `torch.autograd.profiler.profile`.
"""
self._enable_predicate = enable_predicate
self._use_cuda = use_cuda
self._output_dir = output_dir
def before_step(self):
if self._enable_predicate(self.trainer):
self._profiler = torch.autograd.profiler.profile(use_cuda=self._use_cuda)
self._profiler.__enter__()
else:
self._profiler = None
def after_step(self):
if self._profiler is None:
return
self._profiler.__exit__(None, None, None)
PathManager.mkdirs(self._output_dir)
out_file = os.path.join(
self._output_dir, "profiler-trace-iter{}.json".format(self.trainer.iter)
)
if "://" not in out_file:
self._profiler.export_chrome_trace(out_file)
else:
# Support non-posix filesystems
with tempfile.TemporaryDirectory(prefix="detectron2_profiler") as d:
tmp_file = os.path.join(d, "tmp.json")
self._profiler.export_chrome_trace(tmp_file)
with open(tmp_file) as f:
content = f.read()
with PathManager.open(out_file, "w") as f:
f.write(content)
class EvalHook(HookBase):
"""
Run an evaluation function periodically, and at the end of training.
It is executed every ``eval_period`` iterations and after the last iteration.
"""
def __init__(self, eval_period, eval_function):
"""
Args:
eval_period (int): the period to run `eval_function`. Set to 0 to
not evaluate periodically (but still after the last iteration).
eval_function (callable): a function which takes no arguments, and
returns a nested dict of evaluation metrics.
Note:
            This hook must be enabled in either all workers or none.
If you would like only certain workers to perform evaluation,
give other workers a no-op function (`eval_function=lambda: None`).
"""
self._period = eval_period
self._func = eval_function
def _do_eval(self):
results = self._func()
if results:
assert isinstance(
results, dict
), "Eval function must return a dict. Got {} instead.".format(results)
flattened_results = flatten_results_dict(results)
for k, v in flattened_results.items():
try:
v = float(v)
except Exception as e:
raise ValueError(
"[EvalHook] eval_function should return a nested dict of float. "
"Got '{}: {}' instead.".format(k, v)
) from e
self.trainer.storage.put_scalars(**flattened_results, smoothing_hint=False)
        # Evaluation may take different amounts of time among workers.
        # A barrier makes them start the next iteration together.
comm.synchronize()
def after_step(self):
next_iter = self.trainer.iter + 1
if self._period > 0 and next_iter % self._period == 0:
# do the last eval in after_train
if next_iter != self.trainer.max_iter:
self._do_eval()
def after_train(self):
# This condition is to prevent the eval from running after a failed training
if self.trainer.iter + 1 >= self.trainer.max_iter:
self._do_eval()
# func is likely a closure that holds reference to the trainer
# therefore we clean it to avoid circular reference in the end
del self._func
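# --- Hedged usage sketch (illustrative only): an EvalHook whose eval function
# returns a nested dict of floats, as _do_eval requires. The metric names and
# values here are placeholders.
def _demo_eval_hook(trainer):
    def eval_function():
        return {"segm": {"AP": 0.0}}  # nested dict of floats

    trainer.register_hooks([EvalHook(5000, eval_function)])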
class PreciseBN(HookBase):
"""
The standard implementation of BatchNorm uses EMA in inference, which is
sometimes suboptimal.
    This class computes the true average of statistics rather than the moving average,
    and puts the true averages into every BN layer in the given model.
It is executed every ``period`` iterations and after the last iteration.
"""
def __init__(self, period, model, data_loader, num_iter):
"""
Args:
period (int): the period this hook is run, or 0 to not run during training.
                The hook will always run at the end of training.
model (nn.Module): a module whose all BN layers in training mode will be
updated by precise BN.
                Note that the user is responsible for ensuring that the BN layers to be
                updated are in training mode when this hook is triggered.
data_loader (iterable): it will produce data to be run by `model(data)`.
num_iter (int): number of iterations used to compute the precise
statistics.
"""
self._logger = logging.getLogger(__name__)
if len(get_bn_modules(model)) == 0:
self._logger.info(
"PreciseBN is disabled because model does not contain BN layers in training mode."
)
self._disabled = True
return
self._model = model
self._data_loader = data_loader
self._num_iter = num_iter
self._period = period
self._disabled = False
self._data_iter = None
def after_step(self):
next_iter = self.trainer.iter + 1
is_final = next_iter == self.trainer.max_iter
if is_final or (self._period > 0 and next_iter % self._period == 0):
self.update_stats()
def update_stats(self):
"""
Update the model with precise statistics. Users can manually call this method.
"""
if self._disabled:
return
if self._data_iter is None:
self._data_iter = iter(self._data_loader)
def data_loader():
for num_iter in itertools.count(1):
if num_iter % 100 == 0:
self._logger.info(
"Running precise-BN ... {}/{} iterations.".format(num_iter, self._num_iter)
)
# This way we can reuse the same iterator
yield next(self._data_iter)
with EventStorage(): # capture events in a new storage to discard them
self._logger.info(
"Running precise-BN for {} iterations... ".format(self._num_iter)
+ "Note that this could produce different statistics every time."
)
update_bn_stats(self._model, data_loader(), self._num_iter)
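# --- Hedged usage sketch (illustrative only): enabling PreciseBN at the end of
# training. Assumes `model` contains BN layers in training mode and `loader`
# yields batches accepted by `model(data)`.
def _demo_precise_bn(trainer, model, loader):
    trainer.register_hooks([PreciseBN(period=0, model=model, data_loader=loader, num_iter=200)])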
|
banmo-main
|
third_party/detectron2_old/detectron2/engine/hooks.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
from .launch import *
from .train_loop import *
__all__ = [k for k in globals().keys() if not k.startswith("_")]
# prefer to let hooks and defaults live in separate namespaces (therefore not in __all__)
# but still make them available here
from .hooks import *
from .defaults import *
|
banmo-main
|
third_party/detectron2_old/detectron2/engine/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
import logging
import numpy as np
import time
import weakref
from typing import Dict, List, Optional
import torch
from torch.nn.parallel import DataParallel, DistributedDataParallel
import detectron2.utils.comm as comm
from detectron2.utils.events import EventStorage, get_event_storage
from detectron2.utils.logger import _log_api_usage
__all__ = ["HookBase", "TrainerBase", "SimpleTrainer", "AMPTrainer"]
class HookBase:
"""
Base class for hooks that can be registered with :class:`TrainerBase`.
Each hook can implement 4 methods. The way they are called is demonstrated
in the following snippet:
::
hook.before_train()
for iter in range(start_iter, max_iter):
hook.before_step()
trainer.run_step()
hook.after_step()
iter += 1
hook.after_train()
Notes:
1. In the hook method, users can access ``self.trainer`` to access more
properties about the context (e.g., model, current iteration, or config
if using :class:`DefaultTrainer`).
2. A hook that does something in :meth:`before_step` can often be
implemented equivalently in :meth:`after_step`.
If the hook takes non-trivial time, it is strongly recommended to
implement the hook in :meth:`after_step` instead of :meth:`before_step`.
The convention is that :meth:`before_step` should only take negligible time.
Following this convention will allow hooks that do care about the difference
between :meth:`before_step` and :meth:`after_step` (e.g., timer) to
function properly.
"""
trainer: "TrainerBase" = None
"""
A weak reference to the trainer object. Set by the trainer when the hook is registered.
"""
def before_train(self):
"""
Called before the first iteration.
"""
pass
def after_train(self):
"""
Called after the last iteration.
"""
pass
def before_step(self):
"""
Called before each iteration.
"""
pass
def after_step(self):
"""
Called after each iteration.
"""
pass
def state_dict(self):
"""
Hooks are stateless by default, but can be made checkpointable by
implementing `state_dict` and `load_state_dict`.
"""
return {}
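# --- Hedged sketch (illustrative only): a minimal checkpointable hook. It
# counts completed steps and persists the counter through state_dict /
# load_state_dict, following the contract described above.
class _DemoCounterHook(HookBase):
    def __init__(self):
        self._count = 0

    def after_step(self):
        self._count += 1

    def state_dict(self):
        return {"count": self._count}

    def load_state_dict(self, state_dict):
        self._count = state_dict["count"]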
class TrainerBase:
"""
Base class for iterative trainer with hooks.
    The only assumption we make here is that the training runs in a loop.
    A subclass can implement what the loop is.
    We make no assumptions about the existence of dataloader, optimizer, model, etc.
Attributes:
iter(int): the current iteration.
start_iter(int): The iteration to start with.
By convention the minimum possible value is 0.
max_iter(int): The iteration to end training.
storage(EventStorage): An EventStorage that's opened during the course of training.
"""
def __init__(self) -> None:
self._hooks: List[HookBase] = []
self.iter: int = 0
self.start_iter: int = 0
self.max_iter: int
self.storage: EventStorage
_log_api_usage("trainer." + self.__class__.__name__)
def register_hooks(self, hooks: List[Optional[HookBase]]) -> None:
"""
Register hooks to the trainer. The hooks are executed in the order
they are registered.
Args:
hooks (list[Optional[HookBase]]): list of hooks
"""
hooks = [h for h in hooks if h is not None]
for h in hooks:
assert isinstance(h, HookBase)
# To avoid circular reference, hooks and trainer cannot own each other.
# This normally does not matter, but will cause memory leak if the
# involved objects contain __del__:
# See http://engineering.hearsaysocial.com/2013/06/16/circular-references-in-python/
h.trainer = weakref.proxy(self)
self._hooks.extend(hooks)
def train(self, start_iter: int, max_iter: int):
"""
Args:
start_iter, max_iter (int): See docs above
"""
logger = logging.getLogger(__name__)
logger.info("Starting training from iteration {}".format(start_iter))
self.iter = self.start_iter = start_iter
self.max_iter = max_iter
with EventStorage(start_iter) as self.storage:
try:
self.before_train()
for self.iter in range(start_iter, max_iter):
self.before_step()
self.run_step()
self.after_step()
# self.iter == max_iter can be used by `after_train` to
# tell whether the training successfully finished or failed
# due to exceptions.
self.iter += 1
except Exception:
logger.exception("Exception during training:")
raise
finally:
self.after_train()
def before_train(self):
for h in self._hooks:
h.before_train()
def after_train(self):
self.storage.iter = self.iter
for h in self._hooks:
h.after_train()
def before_step(self):
# Maintain the invariant that storage.iter == trainer.iter
# for the entire execution of each step
self.storage.iter = self.iter
for h in self._hooks:
h.before_step()
def after_step(self):
for h in self._hooks:
h.after_step()
def run_step(self):
raise NotImplementedError
def state_dict(self):
ret = {"iteration": self.iter}
hooks_state = {}
for h in self._hooks:
sd = h.state_dict()
if sd:
name = type(h).__qualname__
if name in hooks_state:
# TODO handle repetitive stateful hooks
continue
hooks_state[name] = sd
if hooks_state:
ret["hooks"] = hooks_state
return ret
def load_state_dict(self, state_dict):
logger = logging.getLogger(__name__)
self.iter = state_dict["iteration"]
for key, value in state_dict.get("hooks", {}).items():
for h in self._hooks:
try:
name = type(h).__qualname__
except AttributeError:
continue
if name == key:
h.load_state_dict(value)
break
else:
logger.warning(f"Cannot find the hook '{key}', its state_dict is ignored.")
class SimpleTrainer(TrainerBase):
"""
A simple trainer for the most common type of task:
single-cost single-optimizer single-data-source iterative optimization,
optionally using data-parallelism.
It assumes that every step, you:
    1. Compute the loss with data from the data_loader.
2. Compute the gradients with the above loss.
3. Update the model with the optimizer.
All other tasks during training (checkpointing, logging, evaluation, LR schedule)
are maintained by hooks, which can be registered by :meth:`TrainerBase.register_hooks`.
If you want to do anything fancier than this,
either subclass TrainerBase and implement your own `run_step`,
or write your own training loop.
"""
def __init__(self, model, data_loader, optimizer):
"""
Args:
            model: a torch Module. Takes data from data_loader and returns a
                dict of losses.
data_loader: an iterable. Contains data to be used to call model.
optimizer: a torch optimizer.
"""
super().__init__()
"""
We set the model to training mode in the trainer.
However it's valid to train a model that's in eval mode.
        If you want your model (or a submodule of it) to behave
        as if in evaluation mode during training, you can overwrite its train() method.
"""
model.train()
self.model = model
self.data_loader = data_loader
self._data_loader_iter = iter(data_loader)
self.optimizer = optimizer
def run_step(self):
"""
Implement the standard training logic described above.
"""
assert self.model.training, "[SimpleTrainer] model was changed to eval mode!"
start = time.perf_counter()
"""
If you want to do something with the data, you can wrap the dataloader.
"""
data = next(self._data_loader_iter)
data_time = time.perf_counter() - start
"""
If you want to do something with the losses, you can wrap the model.
"""
loss_dict = self.model(data)
if isinstance(loss_dict, torch.Tensor):
losses = loss_dict
loss_dict = {"total_loss": loss_dict}
else:
losses = sum(loss_dict.values())
"""
If you need to accumulate gradients or do something similar, you can
wrap the optimizer with your custom `zero_grad()` method.
"""
self.optimizer.zero_grad()
losses.backward()
self._write_metrics(loss_dict, data_time)
"""
If you need gradient clipping/scaling or other processing, you can
wrap the optimizer with your custom `step()` method. But it is
suboptimal as explained in https://arxiv.org/abs/2006.15704 Sec 3.2.4
"""
self.optimizer.step()
def _write_metrics(
self,
loss_dict: Dict[str, torch.Tensor],
data_time: float,
prefix: str = "",
):
"""
Args:
loss_dict (dict): dict of scalar losses
data_time (float): time taken by the dataloader iteration
"""
metrics_dict = {k: v.detach().cpu().item() for k, v in loss_dict.items()}
metrics_dict["data_time"] = data_time
# Gather metrics among all workers for logging
# This assumes we do DDP-style training, which is currently the only
# supported method in detectron2.
all_metrics_dict = comm.gather(metrics_dict)
if comm.is_main_process():
storage = get_event_storage()
# data_time among workers can have high variance. The actual latency
# caused by data_time is the maximum among workers.
data_time = np.max([x.pop("data_time") for x in all_metrics_dict])
storage.put_scalar("data_time", data_time)
            # average the remaining metrics
metrics_dict = {
k: np.mean([x[k] for x in all_metrics_dict]) for k in all_metrics_dict[0].keys()
}
total_losses_reduced = sum(metrics_dict.values())
if not np.isfinite(total_losses_reduced):
raise FloatingPointError(
f"Loss became infinite or NaN at iteration={self.iter}!\n"
f"loss_dict = {metrics_dict}"
)
storage.put_scalar("{}total_loss".format(prefix), total_losses_reduced)
if len(metrics_dict) > 1:
storage.put_scalars(**metrics_dict)
def state_dict(self):
ret = super().state_dict()
ret["optimizer"] = self.optimizer.state_dict()
return ret
def load_state_dict(self, state_dict):
super().load_state_dict(state_dict)
self.optimizer.load_state_dict(state_dict["optimizer"])
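# --- Hedged usage sketch (illustrative only): driving SimpleTrainer with a toy
# model and an infinite data iterator. The model returns a dict of losses, as
# run_step expects; all names here are placeholders.
def _demo_simple_trainer():
    import itertools

    class ToyModel(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(2, 1)

        def forward(self, data):
            x, y = data
            return {"l2": ((self.linear(x) - y) ** 2).mean()}

    model = ToyModel()
    data = itertools.repeat((torch.randn(4, 2), torch.randn(4, 1)))
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    SimpleTrainer(model, data, optimizer).train(0, 10)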
class AMPTrainer(SimpleTrainer):
"""
Like :class:`SimpleTrainer`, but uses PyTorch's native automatic mixed precision
in the training loop.
"""
def __init__(self, model, data_loader, optimizer, grad_scaler=None):
"""
Args:
model, data_loader, optimizer: same as in :class:`SimpleTrainer`.
grad_scaler: torch GradScaler to automatically scale gradients.
"""
unsupported = "AMPTrainer does not support single-process multi-device training!"
if isinstance(model, DistributedDataParallel):
assert not (model.device_ids and len(model.device_ids) > 1), unsupported
assert not isinstance(model, DataParallel), unsupported
super().__init__(model, data_loader, optimizer)
if grad_scaler is None:
from torch.cuda.amp import GradScaler
grad_scaler = GradScaler()
self.grad_scaler = grad_scaler
def run_step(self):
"""
Implement the AMP training logic.
"""
assert self.model.training, "[AMPTrainer] model was changed to eval mode!"
assert torch.cuda.is_available(), "[AMPTrainer] CUDA is required for AMP training!"
from torch.cuda.amp import autocast
start = time.perf_counter()
data = next(self._data_loader_iter)
data_time = time.perf_counter() - start
with autocast():
loss_dict = self.model(data)
if isinstance(loss_dict, torch.Tensor):
losses = loss_dict
loss_dict = {"total_loss": loss_dict}
else:
losses = sum(loss_dict.values())
self.optimizer.zero_grad()
self.grad_scaler.scale(losses).backward()
self._write_metrics(loss_dict, data_time)
self.grad_scaler.step(self.optimizer)
self.grad_scaler.update()
def state_dict(self):
ret = super().state_dict()
ret["grad_scaler"] = self.grad_scaler.state_dict()
return ret
def load_state_dict(self, state_dict):
super().load_state_dict(state_dict)
self.grad_scaler.load_state_dict(state_dict["grad_scaler"])
|
banmo-main
|
third_party/detectron2_old/detectron2/engine/train_loop.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
import logging
from datetime import timedelta
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
from detectron2.utils import comm
__all__ = ["DEFAULT_TIMEOUT", "launch"]
DEFAULT_TIMEOUT = timedelta(minutes=30)
def _find_free_port():
import socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Binding to port 0 will cause the OS to find an available port for us
sock.bind(("", 0))
port = sock.getsockname()[1]
sock.close()
# NOTE: there is still a chance the port could be taken by other processes.
return port
def launch(
main_func,
num_gpus_per_machine,
num_machines=1,
machine_rank=0,
dist_url=None,
args=(),
timeout=DEFAULT_TIMEOUT,
):
"""
Launch multi-gpu or distributed training.
This function must be called on all machines involved in the training.
It will spawn child processes (defined by ``num_gpus_per_machine``) on each machine.
Args:
main_func: a function that will be called by `main_func(*args)`
num_gpus_per_machine (int): number of GPUs per machine
num_machines (int): the total number of machines
machine_rank (int): the rank of this machine
dist_url (str): url to connect to for distributed jobs, including protocol
e.g. "tcp://127.0.0.1:8686".
Can be set to "auto" to automatically select a free port on localhost
timeout (timedelta): timeout of the distributed workers
args (tuple): arguments passed to main_func
"""
world_size = num_machines * num_gpus_per_machine
if world_size > 1:
# https://github.com/pytorch/pytorch/pull/14391
# TODO prctl in spawned processes
if dist_url == "auto":
assert num_machines == 1, "dist_url=auto not supported in multi-machine jobs."
port = _find_free_port()
dist_url = f"tcp://127.0.0.1:{port}"
if num_machines > 1 and dist_url.startswith("file://"):
logger = logging.getLogger(__name__)
logger.warning(
"file:// is not a reliable init_method in multi-machine jobs. Prefer tcp://"
)
mp.spawn(
_distributed_worker,
nprocs=num_gpus_per_machine,
args=(
main_func,
world_size,
num_gpus_per_machine,
machine_rank,
dist_url,
args,
timeout,
),
daemon=False,
)
else:
main_func(*args)
def _distributed_worker(
local_rank,
main_func,
world_size,
num_gpus_per_machine,
machine_rank,
dist_url,
args,
timeout=DEFAULT_TIMEOUT,
):
assert torch.cuda.is_available(), "cuda is not available. Please check your installation."
global_rank = machine_rank * num_gpus_per_machine + local_rank
try:
dist.init_process_group(
backend="NCCL",
init_method=dist_url,
world_size=world_size,
rank=global_rank,
timeout=timeout,
)
except Exception as e:
logger = logging.getLogger(__name__)
logger.error("Process group URL: {}".format(dist_url))
raise e
# synchronize is needed here to prevent a possible timeout after calling init_process_group
# See: https://github.com/facebookresearch/maskrcnn-benchmark/issues/172
comm.synchronize()
assert num_gpus_per_machine <= torch.cuda.device_count()
torch.cuda.set_device(local_rank)
# Setup the local process group (which contains ranks within the same machine)
assert comm._LOCAL_PROCESS_GROUP is None
num_machines = world_size // num_gpus_per_machine
for i in range(num_machines):
ranks_on_i = list(range(i * num_gpus_per_machine, (i + 1) * num_gpus_per_machine))
pg = dist.new_group(ranks_on_i)
if i == machine_rank:
comm._LOCAL_PROCESS_GROUP = pg
main_func(*args)
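# --- Hedged usage sketch (illustrative only): launching an entry function on a
# single machine with 2 GPUs. In real use the entry function must live at
# module level so mp.spawn can pickle it; all values here are placeholders.
def _demo_main(cfg):
    print("running on rank", comm.get_rank())

def _demo_launch():
    launch(_demo_main, num_gpus_per_machine=2, num_machines=1, machine_rank=0,
           dist_url="auto", args=(None,))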
|
banmo-main
|
third_party/detectron2_old/detectron2/engine/launch.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
"""
This file contains components with some default boilerplate logic user may need
in training / testing. They will not work for everyone, but many users may find them useful.
The behavior of functions/classes in this file is subject to change,
since they are meant to represent the "common default behavior" people need in their projects.
"""
import argparse
import logging
import os
import sys
import weakref
from collections import OrderedDict
from typing import Optional
import torch
from fvcore.nn.precise_bn import get_bn_modules
from omegaconf import OmegaConf
from torch.nn.parallel import DistributedDataParallel
import detectron2.data.transforms as T
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import CfgNode, LazyConfig
from detectron2.data import (
MetadataCatalog,
build_detection_test_loader,
build_detection_train_loader,
)
from detectron2.evaluation import (
DatasetEvaluator,
inference_on_dataset,
print_csv_format,
verify_results,
)
from detectron2.modeling import build_model
from detectron2.solver import build_lr_scheduler, build_optimizer
from detectron2.utils import comm
from detectron2.utils.collect_env import collect_env_info
from detectron2.utils.env import seed_all_rng
from detectron2.utils.events import CommonMetricPrinter, JSONWriter, TensorboardXWriter
from detectron2.utils.file_io import PathManager
from detectron2.utils.logger import setup_logger
from . import hooks
from .train_loop import AMPTrainer, SimpleTrainer, TrainerBase
__all__ = [
"create_ddp_model",
"default_argument_parser",
"default_setup",
"default_writers",
"DefaultPredictor",
"DefaultTrainer",
]
def create_ddp_model(model, *, fp16_compression=False, **kwargs):
"""
Create a DistributedDataParallel model if there are >1 processes.
Args:
model: a torch.nn.Module
fp16_compression: add fp16 compression hooks to the ddp object.
See more at https://pytorch.org/docs/stable/ddp_comm_hooks.html#torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_hook
kwargs: other arguments of :module:`torch.nn.parallel.DistributedDataParallel`.
""" # noqa
if comm.get_world_size() == 1:
return model
if "device_ids" not in kwargs:
kwargs["device_ids"] = [comm.get_local_rank()]
ddp = DistributedDataParallel(model, **kwargs)
if fp16_compression:
from torch.distributed.algorithms.ddp_comm_hooks import default as comm_hooks
ddp.register_comm_hook(state=None, hook=comm_hooks.fp16_compress_hook)
return ddp
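# --- Hedged usage sketch (illustrative only): create_ddp_model is a no-op for
# world_size == 1, so it can be called unconditionally after building a model.
def _demo_create_ddp_model(model):
    return create_ddp_model(model, broadcast_buffers=False)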
def default_argument_parser(epilog=None):
"""
Create a parser with some common arguments used by detectron2 users.
Args:
epilog (str): epilog passed to ArgumentParser describing the usage.
Returns:
argparse.ArgumentParser:
"""
parser = argparse.ArgumentParser(
epilog=epilog
or f"""
Examples:
Run on single machine:
$ {sys.argv[0]} --num-gpus 8 --config-file cfg.yaml
Change some config options:
$ {sys.argv[0]} --config-file cfg.yaml MODEL.WEIGHTS /path/to/weight.pth SOLVER.BASE_LR 0.001
Run on multiple machines:
(machine0)$ {sys.argv[0]} --machine-rank 0 --num-machines 2 --dist-url <URL> [--other-flags]
(machine1)$ {sys.argv[0]} --machine-rank 1 --num-machines 2 --dist-url <URL> [--other-flags]
""",
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file")
parser.add_argument(
"--resume",
action="store_true",
help="Whether to attempt to resume from the checkpoint directory. "
"See documentation of `DefaultTrainer.resume_or_load()` for what it means.",
)
parser.add_argument("--eval-only", action="store_true", help="perform evaluation only")
parser.add_argument("--num-gpus", type=int, default=1, help="number of gpus *per machine*")
parser.add_argument("--num-machines", type=int, default=1, help="total number of machines")
parser.add_argument(
"--machine-rank", type=int, default=0, help="the rank of this machine (unique per machine)"
)
# PyTorch still may leave orphan processes in multi-gpu training.
    # Therefore we use a deterministic way to obtain the port,
    # so that users are aware of orphan processes by seeing the occupied port.
port = 2 ** 15 + 2 ** 14 + hash(os.getuid() if sys.platform != "win32" else 1) % 2 ** 14
parser.add_argument(
"--dist-url",
default="tcp://127.0.0.1:{}".format(port),
help="initialization URL for pytorch distributed backend. See "
"https://pytorch.org/docs/stable/distributed.html for details.",
)
parser.add_argument(
"opts",
help="Modify config options by adding 'KEY VALUE' pairs at the end of the command. "
"See config references at "
"https://detectron2.readthedocs.io/modules/config.html#config-references",
default=None,
nargs=argparse.REMAINDER,
)
return parser
def _try_get_key(cfg, *keys, default=None):
"""
    Try to select keys from cfg in order; return the value of the first key that exists, otherwise return default.
"""
if isinstance(cfg, CfgNode):
cfg = OmegaConf.create(cfg.dump())
    for k in keys:
        parts = k.split(".")
        # https://github.com/omry/omegaconf/issues/674
        # Descend on a cursor so a partially-matched key does not clobber cfg
        # for the remaining lookups.
        node = cfg
        for p in parts:
            if p not in node:
                break
            node = OmegaConf.select(node, p)
        else:
            return node
return default
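# --- Hedged illustration (not part of the original file): _try_get_key falls
# through a list of dotted keys and returns the first that resolves. The config
# contents below are placeholders.
def _demo_try_get_key():
    cfg = OmegaConf.create({"train": {"output_dir": "/tmp/out"}})
    assert _try_get_key(cfg, "OUTPUT_DIR", "train.output_dir") == "/tmp/out"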
def _highlight(code, filename):
try:
import pygments
except ImportError:
return code
from pygments.lexers import Python3Lexer, YamlLexer
from pygments.formatters import Terminal256Formatter
lexer = Python3Lexer() if filename.endswith(".py") else YamlLexer()
code = pygments.highlight(code, lexer, Terminal256Formatter(style="monokai"))
return code
def default_setup(cfg, args):
"""
Perform some basic common setups at the beginning of a job, including:
1. Set up the detectron2 logger
2. Log basic information about environment, cmdline arguments, and config
3. Backup the config to the output directory
Args:
cfg (CfgNode or omegaconf.DictConfig): the full config to be used
args (argparse.NameSpace): the command line arguments to be logged
"""
output_dir = _try_get_key(cfg, "OUTPUT_DIR", "output_dir", "train.output_dir")
if comm.is_main_process() and output_dir:
PathManager.mkdirs(output_dir)
rank = comm.get_rank()
setup_logger(output_dir, distributed_rank=rank, name="fvcore")
logger = setup_logger(output_dir, distributed_rank=rank)
logger.info("Rank of current process: {}. World size: {}".format(rank, comm.get_world_size()))
logger.info("Environment info:\n" + collect_env_info())
logger.info("Command line arguments: " + str(args))
if hasattr(args, "config_file") and args.config_file != "":
logger.info(
"Contents of args.config_file={}:\n{}".format(
args.config_file,
_highlight(PathManager.open(args.config_file, "r").read(), args.config_file),
)
)
if comm.is_main_process() and output_dir:
# Note: some of our scripts may expect the existence of
# config.yaml in output directory
path = os.path.join(output_dir, "config.yaml")
if isinstance(cfg, CfgNode):
logger.info("Running with full config:\n{}".format(_highlight(cfg.dump(), ".yaml")))
with PathManager.open(path, "w") as f:
f.write(cfg.dump())
else:
LazyConfig.save(cfg, path)
logger.info("Full config saved to {}".format(path))
# make sure each worker has a different, yet deterministic seed if specified
seed = _try_get_key(cfg, "SEED", "train.seed", default=-1)
seed_all_rng(None if seed < 0 else seed + rank)
    # cudnn benchmark has a large overhead. It shouldn't be used considering the small size of
    # typical validation sets.
if not (hasattr(args, "eval_only") and args.eval_only):
torch.backends.cudnn.benchmark = _try_get_key(
cfg, "CUDNN_BENCHMARK", "train.cudnn_benchmark", default=False
)
def default_writers(output_dir: str, max_iter: Optional[int] = None):
"""
Build a list of :class:`EventWriter` to be used.
It now consists of a :class:`CommonMetricPrinter`,
:class:`TensorboardXWriter` and :class:`JSONWriter`.
Args:
output_dir: directory to store JSON metrics and tensorboard events
max_iter: the total number of iterations
Returns:
list[EventWriter]: a list of :class:`EventWriter` objects.
"""
return [
# It may not always print what you want to see, since it prints "common" metrics only.
CommonMetricPrinter(max_iter),
JSONWriter(os.path.join(output_dir, "metrics.json")),
TensorboardXWriter(output_dir),
]
class DefaultPredictor:
"""
Create a simple end-to-end predictor with the given config that runs on
single device for a single input image.
Compared to using the model directly, this class does the following additions:
1. Load checkpoint from `cfg.MODEL.WEIGHTS`.
2. Always take BGR image as the input and apply conversion defined by `cfg.INPUT.FORMAT`.
3. Apply resizing defined by `cfg.INPUT.{MIN,MAX}_SIZE_TEST`.
4. Take one input image and produce a single output, instead of a batch.
This is meant for simple demo purposes, so it does the above steps automatically.
This is not meant for benchmarks or running complicated inference logic.
If you'd like to do anything more fancy, please refer to its source code as examples
to build and use the model manually.
Attributes:
metadata (Metadata): the metadata of the underlying dataset, obtained from
cfg.DATASETS.TEST.
Examples:
::
pred = DefaultPredictor(cfg)
inputs = cv2.imread("input.jpg")
outputs = pred(inputs)
"""
def __init__(self, cfg):
self.cfg = cfg.clone() # cfg can be modified by model
self.model = build_model(self.cfg)
self.model.eval()
if len(cfg.DATASETS.TEST):
self.metadata = MetadataCatalog.get(cfg.DATASETS.TEST[0])
checkpointer = DetectionCheckpointer(self.model)
checkpointer.load(cfg.MODEL.WEIGHTS)
self.aug = T.ResizeShortestEdge(
[cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST
)
self.input_format = cfg.INPUT.FORMAT
assert self.input_format in ["RGB", "BGR"], self.input_format
def __call__(self, original_image):
"""
Args:
original_image (np.ndarray): an image of shape (H, W, C) (in BGR order).
Returns:
predictions (dict):
the output of the model for one image only.
See :doc:`/tutorials/models` for details about the format.
"""
with torch.no_grad(): # https://github.com/sphinx-doc/sphinx/issues/4258
# Apply pre-processing to image.
if self.input_format == "RGB":
                # the model expects RGB inputs, so convert from BGR
original_image = original_image[:, :, ::-1]
height, width = original_image.shape[:2]
image = self.aug.get_transform(original_image).apply_image(original_image)
image = torch.as_tensor(image.astype("float32").transpose(2, 0, 1))
inputs = {"image": image, "height": height, "width": width}
predictions = self.model([inputs])[0]
return predictions
class DefaultTrainer(TrainerBase):
"""
A trainer with default training logic. It does the following:
1. Create a :class:`SimpleTrainer` using model, optimizer, dataloader
defined by the given config. Create a LR scheduler defined by the config.
2. Load the last checkpoint or `cfg.MODEL.WEIGHTS`, if exists, when
`resume_or_load` is called.
3. Register a few common hooks defined by the config.
It is created to simplify the **standard model training workflow** and reduce code boilerplate
for users who only need the standard training workflow, with standard features.
It means this class makes *many assumptions* about your training logic that
may easily become invalid in a new research. In fact, any assumptions beyond those made in the
:class:`SimpleTrainer` are too much for research.
The code of this class has been annotated about restrictive assumptions it makes.
When they do not work for you, you're encouraged to:
1. Overwrite methods of this class, OR:
2. Use :class:`SimpleTrainer`, which only does minimal SGD training and
nothing else. You can then add your own hooks if needed. OR:
3. Write your own training loop similar to `tools/plain_train_net.py`.
See the :doc:`/tutorials/training` tutorials for more details.
Note that the behavior of this class, like other functions/classes in
this file, is not stable, since it is meant to represent the "common default behavior".
It is only guaranteed to work well with the standard models and training workflow in detectron2.
To obtain more stable behavior, write your own training logic with other public APIs.
Examples:
::
trainer = DefaultTrainer(cfg)
trainer.resume_or_load() # load last checkpoint or MODEL.WEIGHTS
trainer.train()
Attributes:
scheduler:
checkpointer (DetectionCheckpointer):
cfg (CfgNode):
"""
def __init__(self, cfg):
"""
Args:
cfg (CfgNode):
"""
super().__init__()
logger = logging.getLogger("detectron2")
if not logger.isEnabledFor(logging.INFO): # setup_logger is not called for d2
setup_logger()
cfg = DefaultTrainer.auto_scale_workers(cfg, comm.get_world_size())
# Assume these objects must be constructed in this order.
model = self.build_model(cfg)
optimizer = self.build_optimizer(cfg, model)
data_loader = self.build_train_loader(cfg)
model = create_ddp_model(model, broadcast_buffers=False)
self._trainer = (AMPTrainer if cfg.SOLVER.AMP.ENABLED else SimpleTrainer)(
model, data_loader, optimizer
)
self.scheduler = self.build_lr_scheduler(cfg, optimizer)
self.checkpointer = DetectionCheckpointer(
# Assume you want to save checkpoints together with logs/statistics
model,
cfg.OUTPUT_DIR,
trainer=weakref.proxy(self),
)
self.start_iter = 0
self.max_iter = cfg.SOLVER.MAX_ITER
self.cfg = cfg
self.register_hooks(self.build_hooks())
def resume_or_load(self, resume=True):
"""
If `resume==True` and `cfg.OUTPUT_DIR` contains the last checkpoint (defined by
a `last_checkpoint` file), resume from the file. Resuming means loading all
        available states (e.g. optimizer and scheduler) and updating the iteration counter
        from the checkpoint. ``cfg.MODEL.WEIGHTS`` will not be used.
        Otherwise, this is considered an independent training run. The method will load model
weights from the file `cfg.MODEL.WEIGHTS` (but will not load other states) and start
from iteration 0.
Args:
resume (bool): whether to do resume or not
"""
self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume)
if resume and self.checkpointer.has_checkpoint():
# The checkpoint stores the training iteration that just finished, thus we start
# at the next iteration
self.start_iter = self.iter + 1
def build_hooks(self):
"""
Build a list of default hooks, including timing, evaluation,
checkpointing, lr scheduling, precise BN, writing events.
Returns:
list[HookBase]:
"""
cfg = self.cfg.clone()
cfg.defrost()
cfg.DATALOADER.NUM_WORKERS = 0 # save some memory and time for PreciseBN
ret = [
hooks.IterationTimer(),
hooks.LRScheduler(),
hooks.PreciseBN(
# Run at the same freq as (but before) evaluation.
cfg.TEST.EVAL_PERIOD,
self.model,
# Build a new data loader to not affect training
self.build_train_loader(cfg),
cfg.TEST.PRECISE_BN.NUM_ITER,
)
if cfg.TEST.PRECISE_BN.ENABLED and get_bn_modules(self.model)
else None,
]
        # Do PreciseBN before checkpointer, because it updates the model and needs to
        # be saved by the checkpointer.
# This is not always the best: if checkpointing has a different frequency,
# some checkpoints may have more precise statistics than others.
if comm.is_main_process():
ret.append(hooks.PeriodicCheckpointer(self.checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD))
def test_and_save_results():
self._last_eval_results = self.test(self.cfg, self.model)
return self._last_eval_results
# Do evaluation after checkpointer, because then if it fails,
# we can use the saved checkpoint to debug.
ret.append(hooks.EvalHook(cfg.TEST.EVAL_PERIOD, test_and_save_results))
if comm.is_main_process():
            # Here the default print/log frequency of each writer is used.
            # Run writers at the end, so that evaluation metrics are written.
ret.append(hooks.PeriodicWriter(self.build_writers(), period=20))
return ret
def build_writers(self):
"""
Build a list of writers to be used using :func:`default_writers()`.
If you'd like a different list of writers, you can overwrite it in
your trainer.
Returns:
list[EventWriter]: a list of :class:`EventWriter` objects.
"""
return default_writers(self.cfg.OUTPUT_DIR, self.max_iter)
def train(self):
"""
Run training.
Returns:
OrderedDict of results, if evaluation is enabled. Otherwise None.
"""
super().train(self.start_iter, self.max_iter)
if len(self.cfg.TEST.EXPECTED_RESULTS) and comm.is_main_process():
assert hasattr(
self, "_last_eval_results"
), "No evaluation results obtained during training!"
verify_results(self.cfg, self._last_eval_results)
return self._last_eval_results
def run_step(self):
self._trainer.iter = self.iter
self._trainer.run_step()
@classmethod
def build_model(cls, cfg):
"""
Returns:
torch.nn.Module:
It now calls :func:`detectron2.modeling.build_model`.
Overwrite it if you'd like a different model.
"""
model = build_model(cfg)
logger = logging.getLogger(__name__)
logger.info("Model:\n{}".format(model))
return model
@classmethod
def build_optimizer(cls, cfg, model):
"""
Returns:
torch.optim.Optimizer:
It now calls :func:`detectron2.solver.build_optimizer`.
Overwrite it if you'd like a different optimizer.
"""
return build_optimizer(cfg, model)
@classmethod
def build_lr_scheduler(cls, cfg, optimizer):
"""
It now calls :func:`detectron2.solver.build_lr_scheduler`.
Overwrite it if you'd like a different scheduler.
"""
return build_lr_scheduler(cfg, optimizer)
@classmethod
def build_train_loader(cls, cfg):
"""
Returns:
iterable
It now calls :func:`detectron2.data.build_detection_train_loader`.
Overwrite it if you'd like a different data loader.
"""
return build_detection_train_loader(cfg)
@classmethod
def build_test_loader(cls, cfg, dataset_name):
"""
Returns:
iterable
It now calls :func:`detectron2.data.build_detection_test_loader`.
Overwrite it if you'd like a different data loader.
"""
return build_detection_test_loader(cfg, dataset_name)
@classmethod
def build_evaluator(cls, cfg, dataset_name):
"""
Returns:
DatasetEvaluator or None
It is not implemented by default.
"""
raise NotImplementedError(
"""
If you want DefaultTrainer to automatically run evaluation,
please implement `build_evaluator()` in subclasses (see train_net.py for example).
Alternatively, you can call evaluation functions yourself (see Colab balloon tutorial for example).
"""
)
@classmethod
def test(cls, cfg, model, evaluators=None):
"""
Args:
cfg (CfgNode):
model (nn.Module):
evaluators (list[DatasetEvaluator] or None): if None, will call
:meth:`build_evaluator`. Otherwise, must have the same length as
``cfg.DATASETS.TEST``.
Returns:
dict: a dict of result metrics
"""
logger = logging.getLogger(__name__)
if isinstance(evaluators, DatasetEvaluator):
evaluators = [evaluators]
if evaluators is not None:
assert len(cfg.DATASETS.TEST) == len(evaluators), "{} != {}".format(
len(cfg.DATASETS.TEST), len(evaluators)
)
results = OrderedDict()
for idx, dataset_name in enumerate(cfg.DATASETS.TEST):
data_loader = cls.build_test_loader(cfg, dataset_name)
# When evaluators are passed in as arguments,
# implicitly assume that evaluators can be created before data_loader.
if evaluators is not None:
evaluator = evaluators[idx]
else:
try:
evaluator = cls.build_evaluator(cfg, dataset_name)
except NotImplementedError:
                    logger.warning(
"No evaluator found. Use `DefaultTrainer.test(evaluators=)`, "
"or implement its `build_evaluator` method."
)
results[dataset_name] = {}
continue
results_i = inference_on_dataset(model, data_loader, evaluator)
results[dataset_name] = results_i
if comm.is_main_process():
assert isinstance(
results_i, dict
), "Evaluator must return a dict on the main process. Got {} instead.".format(
results_i
)
logger.info("Evaluation results for {} in csv format:".format(dataset_name))
print_csv_format(results_i)
if len(results) == 1:
results = list(results.values())[0]
return results
@staticmethod
def auto_scale_workers(cfg, num_workers: int):
"""
        When the config is defined for a certain number of workers (according to
``cfg.SOLVER.REFERENCE_WORLD_SIZE``) that's different from the number of
workers currently in use, returns a new cfg where the total batch size
is scaled so that the per-GPU batch size stays the same as the
original ``IMS_PER_BATCH // REFERENCE_WORLD_SIZE``.
Other config options are also scaled accordingly:
        * training steps and warmup steps are scaled inversely proportionally.
        * the learning rate is scaled proportionally, following :paper:`ImageNet in 1h`.
For example, with the original config like the following:
.. code-block:: yaml
IMS_PER_BATCH: 16
BASE_LR: 0.1
REFERENCE_WORLD_SIZE: 8
MAX_ITER: 5000
STEPS: (4000,)
CHECKPOINT_PERIOD: 1000
When this config is used on 16 GPUs instead of the reference number 8,
calling this method will return a new config with:
.. code-block:: yaml
IMS_PER_BATCH: 32
BASE_LR: 0.2
REFERENCE_WORLD_SIZE: 16
MAX_ITER: 2500
STEPS: (2000,)
CHECKPOINT_PERIOD: 500
Note that both the original config and this new config can be trained on 16 GPUs.
        It's up to the user whether to enable this feature (by setting ``REFERENCE_WORLD_SIZE``).
Returns:
CfgNode: a new config. Same as original if ``cfg.SOLVER.REFERENCE_WORLD_SIZE==0``.
"""
old_world_size = cfg.SOLVER.REFERENCE_WORLD_SIZE
if old_world_size == 0 or old_world_size == num_workers:
return cfg
cfg = cfg.clone()
frozen = cfg.is_frozen()
cfg.defrost()
assert (
cfg.SOLVER.IMS_PER_BATCH % old_world_size == 0
), "Invalid REFERENCE_WORLD_SIZE in config!"
scale = num_workers / old_world_size
bs = cfg.SOLVER.IMS_PER_BATCH = int(round(cfg.SOLVER.IMS_PER_BATCH * scale))
lr = cfg.SOLVER.BASE_LR = cfg.SOLVER.BASE_LR * scale
max_iter = cfg.SOLVER.MAX_ITER = int(round(cfg.SOLVER.MAX_ITER / scale))
warmup_iter = cfg.SOLVER.WARMUP_ITERS = int(round(cfg.SOLVER.WARMUP_ITERS / scale))
cfg.SOLVER.STEPS = tuple(int(round(s / scale)) for s in cfg.SOLVER.STEPS)
cfg.TEST.EVAL_PERIOD = int(round(cfg.TEST.EVAL_PERIOD / scale))
cfg.SOLVER.CHECKPOINT_PERIOD = int(round(cfg.SOLVER.CHECKPOINT_PERIOD / scale))
cfg.SOLVER.REFERENCE_WORLD_SIZE = num_workers # maintain invariant
logger = logging.getLogger(__name__)
logger.info(
f"Auto-scaling the config to batch_size={bs}, learning_rate={lr}, "
f"max_iter={max_iter}, warmup={warmup_iter}."
)
if frozen:
cfg.freeze()
return cfg
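# --- Hedged sketch (illustrative only): extending the default hooks in a
# subclass, as the class docstring recommends, while keeping the ordering
# conventions of build_hooks(). The extra hook here is a placeholder no-op.
class _DemoTrainer(DefaultTrainer):
    def build_hooks(self):
        ret = super().build_hooks()
        # keep PeriodicWriter last; insert the extra hook just before it
        ret.insert(-1, hooks.CallbackHook(after_step=lambda t: None))
        return ret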
# Access basic attributes from the underlying trainer
for _attr in ["model", "data_loader", "optimizer"]:
setattr(
DefaultTrainer,
_attr,
property(
# getter
lambda self, x=_attr: getattr(self._trainer, x),
# setter
lambda self, value, x=_attr: setattr(self._trainer, x, value),
),
)
|
banmo-main
|
third_party/detectron2_old/detectron2/engine/defaults.py
|
from __future__ import print_function
import sys
sys.path.insert(0,'../')
import cv2
import pdb
import argparse
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
from torch.autograd import Variable
import torch.nn.functional as F
import time
from flowutils.io import mkdir_p
from flowutils.util_flow import write_flow, save_pfm
from flowutils.flowlib import point_vec
from flowutils.dydepth import warp_flow
import glob
cudnn.benchmark = False
parser = argparse.ArgumentParser(description='VCN+expansion')
parser.add_argument('--datapath', default='/ssd/kitti_scene/training/',
help='dataset path')
parser.add_argument('--loadmodel', default=None,
help='model path')
parser.add_argument('--testres', type=float, default=1,
help='resolution')
parser.add_argument('--maxdisp', type=int, default=256,
                    help='maximum disparity. Only affects the coarsest cost volume size')
parser.add_argument('--fac', type=float, default=1,
                    help='controls the shape of the search grid. Only affects the coarse cost volume size')
parser.add_argument('--dframe', type=int, default=1,
                    help='how many frames to skip')
args = parser.parse_args()
mean_L = [[0.33,0.33,0.33]]
mean_R = [[0.33,0.33,0.33]]
# construct model, VCN-expansion
from models.VCNplus import VCN
from models.VCNplus import WarpModule, flow_reg
model = VCN([1, 256, 256], md=[int(4*(args.maxdisp/256)),4,4,4,4], fac=args.fac)
model = nn.DataParallel(model, device_ids=[0])
model.cuda()
if args.loadmodel is not None:
pretrained_dict = torch.load(args.loadmodel)
mean_L=pretrained_dict['mean_L']
mean_R=pretrained_dict['mean_R']
pretrained_dict['state_dict'] = {k:v for k,v in pretrained_dict['state_dict'].items()}
model.load_state_dict(pretrained_dict['state_dict'],strict=False)
else:
print('dry run')
print('Number of model parameters: {}'.format(sum([p.data.nelement() for p in model.parameters()])))
seqname = args.datapath.strip().split('/')[-2]
dframe = args.dframe
mkdir_p('./%s/FlowFW_%d' % (seqname,dframe))
mkdir_p('./%s/FlowBW_%d' % (seqname,dframe))
test_left_img = sorted(glob.glob('%s/*'%(args.datapath)))
silhouettes = sorted(glob.glob('%s/*'%(args.datapath.replace('JPEGImages', 'Annotations'))))
def flow_inference(imgL_o, imgR_o):
# for gray input images
if len(imgL_o.shape) == 2:
imgL_o = np.tile(imgL_o[:,:,np.newaxis],(1,1,3))
imgR_o = np.tile(imgR_o[:,:,np.newaxis],(1,1,3))
# resize
# set test res
if args.testres == -1:
testres = np.sqrt(2*1e6/(imgL_o.shape[0]*imgL_o.shape[1]))
#testres = np.sqrt(1e6/(imgL_o.shape[0]*imgL_o.shape[1]))
else:
testres = args.testres
maxh = imgL_o.shape[0]*testres
maxw = imgL_o.shape[1]*testres
max_h = int(maxh // 64 * 64)
max_w = int(maxw // 64 * 64)
if max_h < maxh: max_h += 64
if max_w < maxw: max_w += 64
input_size = imgL_o.shape
imgL = cv2.resize(imgL_o,(max_w, max_h))
imgR = cv2.resize(imgR_o,(max_w, max_h))
imgL_noaug = torch.Tensor(imgL/255.)[np.newaxis].float().cuda()
# flip channel, subtract mean
imgL = imgL[:,:,::-1].copy() / 255. - np.asarray(mean_L).mean(0)[np.newaxis,np.newaxis,:]
imgR = imgR[:,:,::-1].copy() / 255. - np.asarray(mean_R).mean(0)[np.newaxis,np.newaxis,:]
imgL = np.transpose(imgL, [2,0,1])[np.newaxis]
imgR = np.transpose(imgR, [2,0,1])[np.newaxis]
# modify module according to inputs
for i in range(len(model.module.reg_modules)):
model.module.reg_modules[i] = flow_reg([1,max_w//(2**(6-i)), max_h//(2**(6-i))],
ent=getattr(model.module, 'flow_reg%d'%2**(6-i)).ent,\
maxdisp=getattr(model.module, 'flow_reg%d'%2**(6-i)).md,\
fac=getattr(model.module, 'flow_reg%d'%2**(6-i)).fac).cuda()
for i in range(len(model.module.warp_modules)):
model.module.warp_modules[i] = WarpModule([1,max_w//(2**(6-i)), max_h//(2**(6-i))]).cuda()
# get intrinsics
intr_list = [torch.Tensor(inxx).cuda() for inxx in [[1],[1],[1],[1],[1],[0],[0],[1],[0],[0]]]
fl_next = 1
intr_list.append(torch.Tensor([input_size[1] / max_w]).cuda())
intr_list.append(torch.Tensor([input_size[0] / max_h]).cuda())
intr_list.append(torch.Tensor([fl_next]).cuda())
disc_aux = [None,None,None,intr_list,imgL_noaug,None]
# forward
imgL = Variable(torch.FloatTensor(imgL).cuda())
imgR = Variable(torch.FloatTensor(imgR).cuda())
with torch.no_grad():
imgLR = torch.cat([imgL,imgR],0)
model.eval()
torch.cuda.synchronize()
start_time = time.time()
rts = model(imgLR, disc_aux)
torch.cuda.synchronize()
        ttime = (time.time() - start_time); print('time = %.2f ms' % (ttime * 1000))
flow, occ, logmid, logexp = rts
# upsampling
occ = cv2.resize(occ.data.cpu().numpy(), (input_size[1],input_size[0]),interpolation=cv2.INTER_LINEAR)
logexp = cv2.resize(logexp.cpu().numpy(), (input_size[1],input_size[0]),interpolation=cv2.INTER_LINEAR)
logmid = cv2.resize(logmid.cpu().numpy(), (input_size[1],input_size[0]),interpolation=cv2.INTER_LINEAR)
flow = torch.squeeze(flow).data.cpu().numpy()
flow = np.concatenate( [cv2.resize(flow[0],(input_size[1],input_size[0]))[:,:,np.newaxis],
cv2.resize(flow[1],(input_size[1],input_size[0]))[:,:,np.newaxis]],-1)
flow[:,:,0] *= imgL_o.shape[1] / max_w
flow[:,:,1] *= imgL_o.shape[0] / max_h
# deal with unequal size
x0,y0 =np.meshgrid(range(input_size[1]),range(input_size[0]))
hp0 = np.stack([x0,y0],-1) # screen coord
hp1 = flow + hp0
hp1[:,:,0] = hp1[:,:,0]/float(imgL_o.shape[1])*float(imgR_o.shape[1])
hp1[:,:,1] = hp1[:,:,1]/float(imgL_o.shape[0])*float(imgR_o.shape[0])
flow = hp1 - hp0
flow = np.concatenate( (flow, np.ones([flow.shape[0],flow.shape[1],1])),-1)
return flow, occ
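# --- Hedged usage sketch (illustrative only): running flow_inference on a pair
# of frames loaded with OpenCV. The file paths are placeholders.
def _demo_flow_inference():
    imgL = cv2.imread('frame0.jpg')[:, :, ::-1]
    imgR = cv2.imread('frame1.jpg')[:, :, ::-1]
    flow, occ = flow_inference(imgL, imgR)  # flow: HxWx3 (u, v, valid)
    return flow, occ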
def main():
model.eval()
inx=0;jnx=dframe
while True:
if jnx>=len(test_left_img):break
print('%s/%s'%(test_left_img[inx],test_left_img[jnx]))
if inx%dframe==0:
imgL_o = cv2.imread(test_left_img[inx])[:,:,::-1]
imgR_o = cv2.imread(test_left_img[jnx])[:,:,::-1]
mask =cv2.imread(silhouettes[inx],0)
maskR =cv2.imread(silhouettes[jnx],0)
masko = mask.copy()
maskRo = maskR.copy()
            # normalize labels by the smallest nonzero value; label 255 marks occluders
            mask = mask / np.sort(np.unique(mask))[1]
            occluder = mask == 255
            mask[occluder] = 0
            mask = np.logical_and(mask > 0, mask != 255)
            maskR = maskR / np.sort(np.unique(maskR))[1]
            occluder = maskR == 255
            maskR[occluder] = 0
            maskR = np.logical_and(maskR > 0, maskR != 255)
indices = np.where(mask>0); xid = indices[1]; yid = indices[0]
length = [ (xid.max()-xid.min())//2, (yid.max()-yid.min())//2]
flowfw, occfw = flow_inference(imgL_o, imgR_o)
flowfw_normed = np.concatenate( [flowfw[:,:,:1]/length[0], flowfw[:,:,1:2]/length[1]],-1 )
flowbw, occbw = flow_inference(imgR_o, imgL_o)
# save predictions
# downsample first
flowfw = resize_to_target(flowfw,is_flow=True)
flowbw = resize_to_target(flowbw,is_flow=True)
occfw = resize_to_target(occfw, is_flow=False)
occbw = resize_to_target(occbw, is_flow=False)
imgL_o = resize_to_target(imgL_o, is_flow=False)
imgR_o = resize_to_target(imgR_o, is_flow=False)
            mask = resize_to_target(mask.astype(float), is_flow=False).astype(bool)
            maskR = resize_to_target(maskR.astype(float), is_flow=False).astype(bool)
with open('%s/FlowFW_%d/flo-%05d.pfm'% (seqname,dframe,inx),'w') as f:
save_pfm(f,flowfw[::-1].astype(np.float32))
with open('%s/FlowFW_%d/occ-%05d.pfm'% (seqname,dframe,inx),'w') as f:
save_pfm(f,occfw[::-1].astype(np.float32))
with open('%s/FlowBW_%d/flo-%05d.pfm'% (seqname,dframe,jnx),'w') as f:
save_pfm(f,flowbw[::-1].astype(np.float32))
with open('%s/FlowBW_%d/occ-%05d.pfm'% (seqname,dframe,jnx),'w') as f:
save_pfm(f,occbw[::-1].astype(np.float32))
imwarped = warp_flow(imgR_o, flowfw[:,:,:2])
cv2.imwrite('%s/FlowFW_%d/warp-%05d.jpg'% (seqname, dframe, inx),imwarped[:,:,::-1])
imwarped = warp_flow(imgL_o, flowbw[:,:,:2])
cv2.imwrite('%s/FlowBW_%d/warp-%05d.jpg'% (seqname, dframe, jnx),imwarped[:,:,::-1])
# visualize semi-dense flow for forward
x0,y0 =np.meshgrid(range(flowfw.shape[1]),range(flowfw.shape[0]))
hp0 = np.stack([x0,y0],-1)
dis = warp_flow(hp0+flowbw[...,:2], flowfw[...,:2]) - hp0
dis = np.linalg.norm(dis[:,:,:2],2,-1)
dis = dis / np.sqrt(flowfw.shape[0] * flowfw.shape[1]) * 2
fb_mask = np.exp(-25*dis) > 0.8
#mask = np.logical_and(mask, fb_mask)
mask = fb_mask # do not use object mask
flowvis = flowfw.copy(); flowvis[~mask]=0
flowvis = point_vec(imgL_o, flowvis,skip=10)
cv2.imwrite('%s/FlowFW_%d/visflo-%05d.jpg'% (seqname, dframe, inx),flowvis)
flowvis = flowbw.copy(); flowvis[~maskR]=0
flowvis = point_vec(imgR_o, flowvis)
cv2.imwrite('%s/FlowBW_%d/visflo-%05d.jpg'% (seqname, dframe, jnx),flowvis)
inx+=1
jnx+=1
torch.cuda.empty_cache()
def resize_to_target(flowfw, is_flow=False):
h,w = flowfw.shape[:2]
factor = np.sqrt(250*1000 / (h*w) )
th,tw = int(h*factor), int(w*factor)
factor_h = th/h
factor_w = tw/w
flowfw_d = cv2.resize(flowfw, (tw,th))
if is_flow:
flowfw_d[...,0] *= factor_w
flowfw_d[...,1] *= factor_h
return flowfw_d
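# --- Hedged illustration (not part of the original file): resize_to_target
# rescales the input to roughly 250k pixels and, for flow fields, scales the
# u/v components by the same factors so the vectors stay consistent.
def _demo_resize_to_target():
    dummy_flow = np.zeros((480, 640, 3), np.float32)
    resized = resize_to_target(dummy_flow, is_flow=True)
    print(resized.shape)  # about 0.9x the input size for 480x640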
if __name__ == '__main__':
main()
|
banmo-main
|
third_party/vcnplus/auto_gen.py
|
"""
# ==============================
# flowlib.py
# library for optical flow processing
# Author: Ruoteng Li
# Date: 6th Aug 2016
# ==============================
"""
import png
from flowutils.util_flow import readPFM
import numpy as np
import matplotlib.colors as cl
import matplotlib.pyplot as plt
from PIL import Image
import cv2
import pdb
UNKNOWN_FLOW_THRESH = 1e7
SMALLFLOW = 0.0
LARGEFLOW = 1e8
"""
=============
Flow Section
=============
"""
def show_flow(filename):
"""
visualize optical flow map using matplotlib
:param filename: optical flow file
:return: None
"""
flow = read_flow(filename)
img = flow_to_image(flow)
plt.imshow(img)
plt.show()
def point_vec(img,flow,skip=40):
maxsize=1000.
extendfac=1.
resize_factor = 1
#resize_factor = max(1,int(max(maxsize/img.shape[0], maxsize/img.shape[1])))
meshgrid = np.meshgrid(range(img.shape[1]),range(img.shape[0]))
dispimg = cv2.resize(img[:,:,::-1].copy(), None,fx=resize_factor,fy=resize_factor)
colorflow = flow_to_image(flow).astype(int)
for i in range(img.shape[1]): # x
for j in range(img.shape[0]): # y
if flow[j,i,2] != 1: continue
if j%skip!=0 or i%skip!=0: continue
xend = int((meshgrid[0][j,i]+extendfac*flow[j,i,0])*resize_factor)
yend = int((meshgrid[1][j,i]+extendfac*flow[j,i,1])*resize_factor)
leng = np.linalg.norm(flow[j,i,:2]*extendfac)
if leng<1:continue
dispimg = cv2.arrowedLine(dispimg, (meshgrid[0][j,i]*resize_factor,meshgrid[1][j,i]*resize_factor),\
(xend,yend),
(int(colorflow[j,i,2]),int(colorflow[j,i,1]),int(colorflow[j,i,0])),1,tipLength=4/leng,line_type=cv2.LINE_AA)
return dispimg
def visualize_flow(flow, mode='Y'):
"""
this function visualize the input flow
:param flow: input flow in array
:param mode: choose which color mode to visualize the flow (Y: Ccbcr, RGB: RGB color)
:return: None
"""
if mode == 'Y':
# Ccbcr color wheel
img = flow_to_image(flow)
elif mode == 'RGB':
(h, w) = flow.shape[0:2]
du = flow[:, :, 0]
dv = flow[:, :, 1]
valid = flow[:, :, 2]
max_flow = np.sqrt(du**2+dv**2).max()
img = np.zeros((h, w, 3), dtype=np.float64)
# angle layer
img[:, :, 0] = np.fmod(np.arctan2(dv, du) / (2 * np.pi)+1.,1.)
# magnitude layer, normalized to 1
img[:, :, 1] = np.sqrt(du * du + dv * dv) * 8 / max_flow
# phase layer
img[:, :, 2] = 8 - img[:, :, 1]
# clip to [0,1]
small_idx = img[:, :, 0:3] < 0
large_idx = img[:, :, 0:3] > 1
img[small_idx] = 0
img[large_idx] = 1
# convert to rgb
img = cl.hsv_to_rgb(img)
# remove invalid point
img[:, :, 0] = img[:, :, 0] * valid
img[:, :, 1] = img[:, :, 1] * valid
img[:, :, 2] = img[:, :, 2] * valid
return img
def read_flow(filename):
"""
read optical flow data from flow file
:param filename: name of the flow file
:return: optical flow data in numpy array
"""
if filename.endswith('.flo'):
flow = read_flo_file(filename)
elif filename.endswith('.png'):
flow = read_png_file(filename)
elif filename.endswith('.pfm'):
flow = read_pfm_file(filename)
else:
raise Exception('Invalid flow file format!')
return flow
def write_flo(flow, filename):
TAG_STRING = b'PIEH'
assert type(filename) is str, "file is not str %r" % str(filename)
    assert filename[-4:] == '.flo', "file ending is not .flo %r" % filename[-4:]
height, width, nBands = flow.shape
assert nBands == 2, "Number of bands = %r != 2" % nBands
u = flow[: , : , 0]
v = flow[: , : , 1]
assert u.shape == v.shape, "Invalid flow shape"
height, width = u.shape
f = open(filename,'wb')
f.write(TAG_STRING)
np.array(width).astype(np.int32).tofile(f)
np.array(height).astype(np.int32).tofile(f)
tmp = np.zeros((height, width*nBands))
tmp[:,np.arange(width)*2] = u
tmp[:,np.arange(width)*2 + 1] = v
tmp.astype(np.float32).tofile(f)
f.close()
def write_flow(flow, filename):
"""
write optical flow in Middlebury .flo format
:param flow: optical flow map
:param filename: optical flow file path to be saved
:return: None
"""
f = open(filename, 'wb')
magic = np.array([202021.25], dtype=np.float32)
(height, width) = flow.shape[0:2]
w = np.array([width], dtype=np.int32)
h = np.array([height], dtype=np.int32)
magic.tofile(f)
w.tofile(f)
h.tofile(f)
flow.tofile(f)
f.close()
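# A round-trip sketch (hypothetical /tmp path): write_flow stores the raw
# float32 planes after the magic/size header, and read_flo_file (below)
# reads them back with an all-ones validity channel appended.
def _demo_flo_roundtrip():
    flow = np.random.rand(4, 5, 2).astype(np.float32)
    write_flow(flow, '/tmp/_demo.flo')
    back = read_flo_file('/tmp/_demo.flo')  # shape (4, 5, 3)
    assert np.allclose(back[:, :, :2], flow)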
def save_flow_image(flow, image_file):
"""
save flow visualization into image file
    :param flow: optical flow data
    :param image_file: image file path to save the visualization
    :return: None
"""
flow_img = flow_to_image(flow)
img_out = Image.fromarray(flow_img)
img_out.save(image_file)
def flowfile_to_imagefile(flow_file, image_file):
"""
convert flowfile into image file
    :param flow_file: optical flow file to read
    :param image_file: image file path to save the visualization
    :return: None
"""
flow = read_flow(flow_file)
save_flow_image(flow, image_file)
def segment_flow(flow):
h = flow.shape[0]
w = flow.shape[1]
u = flow[:, :, 0]
v = flow[:, :, 1]
idx = ((abs(u) > LARGEFLOW) | (abs(v) > LARGEFLOW))
idx2 = (abs(u) == SMALLFLOW)
class0 = (v == 0) & (u == 0)
u[idx2] = 0.00001
tan_value = v / u
class1 = (tan_value < 1) & (tan_value >= 0) & (u > 0) & (v >= 0)
class2 = (tan_value >= 1) & (u >= 0) & (v >= 0)
class3 = (tan_value < -1) & (u <= 0) & (v >= 0)
class4 = (tan_value < 0) & (tan_value >= -1) & (u < 0) & (v >= 0)
class8 = (tan_value >= -1) & (tan_value < 0) & (u > 0) & (v <= 0)
class7 = (tan_value < -1) & (u >= 0) & (v <= 0)
class6 = (tan_value >= 1) & (u <= 0) & (v <= 0)
class5 = (tan_value >= 0) & (tan_value < 1) & (u < 0) & (v <= 0)
seg = np.zeros((h, w))
seg[class1] = 1
seg[class2] = 2
seg[class3] = 3
seg[class4] = 4
seg[class5] = 5
seg[class6] = 6
seg[class7] = 7
seg[class8] = 8
seg[class0] = 0
seg[idx] = 0
return seg
def flow_error(tu, tv, u, v):
"""
Calculate average end point error
:param tu: ground-truth horizontal flow map
:param tv: ground-truth vertical flow map
:param u: estimated horizontal flow map
:param v: estimated vertical flow map
:return: End point error of the estimated flow
"""
smallflow = 0.0
'''
stu = tu[bord+1:end-bord,bord+1:end-bord]
stv = tv[bord+1:end-bord,bord+1:end-bord]
su = u[bord+1:end-bord,bord+1:end-bord]
sv = v[bord+1:end-bord,bord+1:end-bord]
'''
stu = tu[:]
stv = tv[:]
su = u[:]
sv = v[:]
idxUnknow = (abs(stu) > UNKNOWN_FLOW_THRESH) | (abs(stv) > UNKNOWN_FLOW_THRESH)
stu[idxUnknow] = 0
stv[idxUnknow] = 0
su[idxUnknow] = 0
sv[idxUnknow] = 0
    ind2 = (np.absolute(stu) > smallflow) | (np.absolute(stv) > smallflow)
index_su = su[ind2]
index_sv = sv[ind2]
an = 1.0 / np.sqrt(index_su ** 2 + index_sv ** 2 + 1)
un = index_su * an
vn = index_sv * an
index_stu = stu[ind2]
index_stv = stv[ind2]
tn = 1.0 / np.sqrt(index_stu ** 2 + index_stv ** 2 + 1)
tun = index_stu * tn
tvn = index_stv * tn
'''
angle = un * tun + vn * tvn + (an * tn)
index = [angle == 1.0]
angle[index] = 0.999
ang = np.arccos(angle)
mang = np.mean(ang)
mang = mang * 180 / np.pi
'''
epe = np.sqrt((stu - su) ** 2 + (stv - sv) ** 2)
epe = epe[ind2]
mepe = np.mean(epe)
return mepe
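# Worked example: a uniform 1 px horizontal offset between estimate and
# ground truth gives an average end-point error of exactly 1.0.
def _demo_flow_error():
    tu = np.full((8, 8), 3.0)
    tv = np.zeros((8, 8))
    print(flow_error(tu, tv, tu + 1.0, tv.copy()))  # -> 1.0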
def flow_to_image(flow):
"""
Convert flow into middlebury color code image
:param flow: optical flow map
:return: optical flow image in middlebury color
"""
u = flow[:, :, 0]
v = flow[:, :, 1]
maxu = -999.
maxv = -999.
minu = 999.
minv = 999.
idxUnknow = (abs(u) > UNKNOWN_FLOW_THRESH) | (abs(v) > UNKNOWN_FLOW_THRESH)
u[idxUnknow] = 0
v[idxUnknow] = 0
maxu = max(maxu, np.max(u))
minu = min(minu, np.min(u))
maxv = max(maxv, np.max(v))
minv = min(minv, np.min(v))
rad = np.sqrt(u ** 2 + v ** 2)
maxrad = max(-1, np.max(rad))
u = u/(maxrad + np.finfo(float).eps)
v = v/(maxrad + np.finfo(float).eps)
img = compute_color(u, v)
idx = np.repeat(idxUnknow[:, :, np.newaxis], 3, axis=2)
img[idx] = 0
return np.uint8(img)
def evaluate_flow_file(gt_file, pred_file):
"""
evaluate the estimated optical flow end point error according to ground truth provided
:param gt_file: ground truth file path
:param pred_file: estimated optical flow file path
:return: end point error, float32
"""
# Read flow files and calculate the errors
gt_flow = read_flow(gt_file) # ground truth flow
eva_flow = read_flow(pred_file) # predicted flow
# Calculate errors
average_pe = flow_error(gt_flow[:, :, 0], gt_flow[:, :, 1], eva_flow[:, :, 0], eva_flow[:, :, 1])
return average_pe
def evaluate_flow(gt_flow, pred_flow):
"""
gt: ground-truth flow
pred: estimated flow
"""
average_pe = flow_error(gt_flow[:, :, 0], gt_flow[:, :, 1], pred_flow[:, :, 0], pred_flow[:, :, 1])
return average_pe
"""
==============
Disparity Section
==============
"""
def read_disp_png(file_name):
"""
Read optical flow from KITTI .png file
:param file_name: name of the flow file
:return: optical flow data in matrix
"""
image_object = png.Reader(filename=file_name)
image_direct = image_object.asDirect()
image_data = list(image_direct[2])
(w, h) = image_direct[3]['size']
    channel = len(image_data[0]) // w  # integer channel count (true division breaks np.zeros)
    flow = np.zeros((h, w, channel), dtype=np.uint16)
for i in range(len(image_data)):
for j in range(channel):
flow[i, :, j] = image_data[i][j::channel]
return flow[:, :, 0] / 256
def disp_to_flowfile(disp, filename):
"""
Read KITTI disparity file in png format
:param disp: disparity matrix
:param filename: the flow file name to save
:return: None
"""
f = open(filename, 'wb')
magic = np.array([202021.25], dtype=np.float32)
(height, width) = disp.shape[0:2]
w = np.array([width], dtype=np.int32)
h = np.array([height], dtype=np.int32)
empty_map = np.zeros((height, width), dtype=np.float32)
data = np.dstack((disp, empty_map))
magic.tofile(f)
w.tofile(f)
h.tofile(f)
data.tofile(f)
f.close()
"""
==============
Image Section
==============
"""
def read_image(filename):
"""
Read normal image of any format
:param filename: name of the image file
:return: image data in matrix uint8 type
"""
img = Image.open(filename)
im = np.array(img)
return im
def warp_image(im, flow):
"""
Use optical flow to warp image to the next
:param im: image to warp
:param flow: optical flow
:return: warped image
"""
from scipy import interpolate
image_height = im.shape[0]
image_width = im.shape[1]
flow_height = flow.shape[0]
flow_width = flow.shape[1]
n = image_height * image_width
(iy, ix) = np.mgrid[0:image_height, 0:image_width]
(fy, fx) = np.mgrid[0:flow_height, 0:flow_width]
fx = fx.astype(np.float64)
fy = fy.astype(np.float64)
fx += flow[:,:,0]
fy += flow[:,:,1]
mask = np.logical_or(fx <0 , fx > flow_width)
mask = np.logical_or(mask, fy < 0)
mask = np.logical_or(mask, fy > flow_height)
fx = np.minimum(np.maximum(fx, 0), flow_width)
fy = np.minimum(np.maximum(fy, 0), flow_height)
points = np.concatenate((ix.reshape(n,1), iy.reshape(n,1)), axis=1)
xi = np.concatenate((fx.reshape(n, 1), fy.reshape(n,1)), axis=1)
warp = np.zeros((image_height, image_width, im.shape[2]))
for i in range(im.shape[2]):
channel = im[:, :, i]
plt.imshow(channel, cmap='gray')
values = channel.reshape(n, 1)
new_channel = interpolate.griddata(points, values, xi, method='cubic')
new_channel = np.reshape(new_channel, [flow_height, flow_width])
new_channel[mask] = 1
warp[:, :, i] = new_channel.astype(np.uint8)
return warp.astype(np.uint8)
"""
==============
Others
==============
"""
def pfm_to_flo(pfm_file):
flow_filename = pfm_file[0:pfm_file.find('.pfm')] + '.flo'
(data, scale) = readPFM(pfm_file)
flow = data[:, :, 0:2]
write_flow(flow, flow_filename)
def scale_image(image, new_range):
"""
Linearly scale the image into desired range
:param image: input image
:param new_range: the new range to be aligned
:return: image normalized in new range
"""
min_val = np.min(image).astype(np.float32)
max_val = np.max(image).astype(np.float32)
min_val_new = np.array(min(new_range), dtype=np.float32)
max_val_new = np.array(max(new_range), dtype=np.float32)
scaled_image = (image - min_val) / (max_val - min_val) * (max_val_new - min_val_new) + min_val_new
return scaled_image.astype(np.uint8)
def compute_color(u, v):
"""
compute optical flow color map
:param u: optical flow horizontal map
:param v: optical flow vertical map
:return: optical flow in color code
"""
[h, w] = u.shape
img = np.zeros([h, w, 3])
nanIdx = np.isnan(u) | np.isnan(v)
u[nanIdx] = 0
v[nanIdx] = 0
colorwheel = make_color_wheel()
ncols = np.size(colorwheel, 0)
rad = np.sqrt(u**2+v**2)
a = np.arctan2(-v, -u) / np.pi
fk = (a+1) / 2 * (ncols - 1) + 1
k0 = np.floor(fk).astype(int)
k1 = k0 + 1
k1[k1 == ncols+1] = 1
f = fk - k0
for i in range(0, np.size(colorwheel,1)):
tmp = colorwheel[:, i]
col0 = tmp[k0-1] / 255
col1 = tmp[k1-1] / 255
col = (1-f) * col0 + f * col1
idx = rad <= 1
col[idx] = 1-rad[idx]*(1-col[idx])
notidx = np.logical_not(idx)
col[notidx] *= 0.75
img[:, :, i] = np.uint8(np.floor(255 * col*(1-nanIdx)))
return img
def make_color_wheel():
"""
Generate color wheel according Middlebury color code
:return: Color wheel
"""
RY = 15
YG = 6
GC = 4
CB = 11
BM = 13
MR = 6
ncols = RY + YG + GC + CB + BM + MR
colorwheel = np.zeros([ncols, 3])
col = 0
# RY
colorwheel[0:RY, 0] = 255
colorwheel[0:RY, 1] = np.transpose(np.floor(255*np.arange(0, RY) / RY))
col += RY
# YG
colorwheel[col:col+YG, 0] = 255 - np.transpose(np.floor(255*np.arange(0, YG) / YG))
colorwheel[col:col+YG, 1] = 255
col += YG
# GC
colorwheel[col:col+GC, 1] = 255
colorwheel[col:col+GC, 2] = np.transpose(np.floor(255*np.arange(0, GC) / GC))
col += GC
# CB
colorwheel[col:col+CB, 1] = 255 - np.transpose(np.floor(255*np.arange(0, CB) / CB))
colorwheel[col:col+CB, 2] = 255
col += CB
# BM
colorwheel[col:col+BM, 2] = 255
colorwheel[col:col+BM, 0] = np.transpose(np.floor(255*np.arange(0, BM) / BM))
    col += BM
# MR
colorwheel[col:col+MR, 2] = 255 - np.transpose(np.floor(255 * np.arange(0, MR) / MR))
colorwheel[col:col+MR, 0] = 255
return colorwheel
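# Sanity sketch: the wheel has RY+YG+GC+CB+BM+MR = 55 entries, starting at
# pure red, so rightward flow maps to the red end of the wheel.
def _demo_color_wheel():
    wheel = make_color_wheel()
    assert wheel.shape == (55, 3)
    assert (wheel[0] == [255, 0, 0]).all()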
def read_flo_file(filename):
"""
Read from Middlebury .flo file
    :param filename: name of the flow file
:return: optical flow data in matrix
"""
f = open(filename, 'rb')
magic = np.fromfile(f, np.float32, count=1)
data2d = None
if 202021.25 != magic:
print('Magic number incorrect. Invalid .flo file')
else:
w = np.fromfile(f, np.int32, count=1)
h = np.fromfile(f, np.int32, count=1)
#print("Reading %d x %d flow file in .flo format" % (h, w))
flow = np.ones((h[0],w[0],3))
data2d = np.fromfile(f, np.float32, count=2 * w[0] * h[0])
# reshape data into 3D array (columns, rows, channels)
data2d = np.resize(data2d, (h[0], w[0], 2))
flow[:,:,:2] = data2d
f.close()
return flow
def read_png_file(flow_file):
"""
Read from KITTI .png file
:param flow_file: name of the flow file
:return: optical flow data in matrix
"""
flow = cv2.imread(flow_file,-1)[:,:,::-1].astype(np.float64)
# flow_object = png.Reader(filename=flow_file)
# flow_direct = flow_object.asDirect()
# flow_data = list(flow_direct[2])
# (w, h) = flow_direct[3]['size']
# #print("Reading %d x %d flow file in .png format" % (h, w))
# flow = np.zeros((h, w, 3), dtype=np.float64)
# for i in range(len(flow_data)):
# flow[i, :, 0] = flow_data[i][0::3]
# flow[i, :, 1] = flow_data[i][1::3]
# flow[i, :, 2] = flow_data[i][2::3]
invalid_idx = (flow[:, :, 2] == 0)
flow[:, :, 0:2] = (flow[:, :, 0:2] - 2 ** 15) / 64.0
flow[invalid_idx, 0] = 0
flow[invalid_idx, 1] = 0
return flow
def read_pfm_file(flow_file):
"""
Read from .pfm file
:param flow_file: name of the flow file
:return: optical flow data in matrix
"""
(data, scale) = readPFM(flow_file)
return data
# fast resample layer
def resample(img, sz):
"""
img: flow map to be resampled
    sz: new flow map size. Must be [height, width]
"""
original_image_size = img.shape
in_height = img.shape[0]
in_width = img.shape[1]
out_height = sz[0]
out_width = sz[1]
out_flow = np.zeros((out_height, out_width, 2))
# find scale
height_scale = float(in_height) / float(out_height)
width_scale = float(in_width) / float(out_width)
[x,y] = np.meshgrid(range(out_width), range(out_height))
xx = x * width_scale
yy = y * height_scale
x0 = np.floor(xx).astype(np.int32)
x1 = x0 + 1
y0 = np.floor(yy).astype(np.int32)
y1 = y0 + 1
x0 = np.clip(x0,0,in_width-1)
x1 = np.clip(x1,0,in_width-1)
y0 = np.clip(y0,0,in_height-1)
y1 = np.clip(y1,0,in_height-1)
Ia = img[y0,x0,:]
Ib = img[y1,x0,:]
Ic = img[y0,x1,:]
Id = img[y1,x1,:]
wa = (y1-yy) * (x1-xx)
wb = (yy-y0) * (x1-xx)
wc = (y1-yy) * (xx-x0)
wd = (yy-y0) * (xx-x0)
out_flow[:,:,0] = (Ia[:,:,0]*wa + Ib[:,:,0]*wb + Ic[:,:,0]*wc + Id[:,:,0]*wd) * out_width / in_width
out_flow[:,:,1] = (Ia[:,:,1]*wa + Ib[:,:,1]*wb + Ic[:,:,1]*wc + Id[:,:,1]*wd) * out_height / in_height
return out_flow
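# A minimal sketch: downsampling a flow field by 2 also halves the flow
# vectors, since resample rescales them to the new pixel grid.
def _demo_resample():
    flow = np.ones((8, 8, 2))
    print(resample(flow, [4, 4])[0, 0])  # -> [0.5, 0.5]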
|
banmo-main
|
third_party/vcnplus/flowutils/flowlib.py
|
"""
Taken from https://github.com/ClementPinard/FlowNetPytorch
"""
import pdb
import torch
import torch.nn.functional as F
def EPE(input_flow, target_flow, mask, sparse=False, mean=True):
#mask = target_flow[:,2]>0
target_flow = target_flow[:,:2]
EPE_map = torch.norm(target_flow-input_flow,2,1)
batch_size = EPE_map.size(0)
    if sparse:
        # invalid flow is defined with both flow coordinates being exactly 0;
        # fold that into the valid-pixel mask instead of slicing EPE_map here
        # (slicing first would break the EPE_map[mask] indexing below)
        invalid = (target_flow[:,0] == 0) & (target_flow[:,1] == 0)
        mask = mask & ~invalid
if mean:
return EPE_map[mask].mean()
else:
return EPE_map[mask].sum()/batch_size
def rob_EPE(input_flow, target_flow, mask, sparse=False, mean=True):
#mask = target_flow[:,2]>0
target_flow = target_flow[:,:2]
#TODO
# EPE_map = torch.norm(target_flow-input_flow,2,1)
EPE_map = (torch.norm(target_flow-input_flow,1,1)+0.01).pow(0.4)
batch_size = EPE_map.size(0)
    if sparse:
        # invalid flow is defined with both flow coordinates being exactly 0;
        # fold that into the valid-pixel mask (same fix as in EPE above)
        invalid = (target_flow[:,0] == 0) & (target_flow[:,1] == 0)
        mask = mask & ~invalid
if mean:
return EPE_map[mask].mean()
else:
return EPE_map[mask].sum()/batch_size
def sparse_max_pool(input, size):
'''Downsample the input by considering 0 values as invalid.
Unfortunately, no generic interpolation mode can resize a sparse map correctly,
the strategy here is to use max pooling for positive values and "min pooling"
for negative values, the two results are then summed.
    This technique allows sparsity to be minimized, contrary to nearest interpolation,
which could potentially lose information for isolated data points.'''
positive = (input > 0).float()
negative = (input < 0).float()
output = F.adaptive_max_pool2d(input * positive, size) - F.adaptive_max_pool2d(-input * negative, size)
return output
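# Sketch: a single non-zero sample survives 2x downsampling, whereas an
# area resize would dilute it with the surrounding (invalid) zeros.
def _demo_sparse_max_pool():
    x = torch.zeros(1, 1, 4, 4)
    x[0, 0, 1, 1] = 3.0
    print(sparse_max_pool(x, (2, 2)))  # the 3.0 is kept in its quadrant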
def multiscaleEPE(network_output, target_flow, mask, weights=None, sparse=False, rob_loss = False):
def one_scale(output, target, mask, sparse):
b, _, h, w = output.size()
if sparse:
target_scaled = sparse_max_pool(target, (h, w))
else:
target_scaled = F.interpolate(target, (h, w), mode='area')
mask = F.interpolate(mask.float().unsqueeze(1), (h, w), mode='bilinear').squeeze(1)==1
if rob_loss:
return rob_EPE(output, target_scaled, mask, sparse, mean=False)
else:
return EPE(output, target_scaled, mask, sparse, mean=False)
if type(network_output) not in [tuple, list]:
network_output = [network_output]
if weights is None:
weights = [0.005, 0.01, 0.02, 0.08, 0.32] # as in original article
assert(len(weights) == len(network_output))
loss = 0
for output, weight in zip(network_output, weights):
loss += weight * one_scale(output, target_flow, mask, sparse)
return loss
def realEPE(output, target, mask, sparse=False):
b, _, h, w = target.size()
upsampled_output = F.interpolate(output, (h,w), mode='bilinear', align_corners=False)
return EPE(upsampled_output, target,mask, sparse, mean=True)
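# Usage sketch with synthetic tensors: five pyramid levels of zero flow
# against a unit-norm target; each level contributes its weighted,
# mask-summed EPE divided by the batch size.
def _demo_multiscale():
    target = torch.zeros(2, 2, 64, 64)
    target[:, 0] = 1.0  # unit flow along x
    mask = torch.ones(2, 64, 64)
    outputs = [torch.zeros(2, 2, 64 // 2 ** i, 64 // 2 ** i) for i in range(5)]
    print(multiscaleEPE(outputs, target, mask))  # -> tensor(46.08)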
|
banmo-main
|
third_party/vcnplus/flowutils/multiscaleloss.py
|
import errno
import os
import shutil
import sys
import traceback
import zipfile
if sys.version_info[0] == 2:
import urllib2
else:
import urllib.request
def add_image(log,tag,img,step):
"""
for torch tensorboard
"""
timg = img[0]
timg = (timg-timg.min())/(timg.max()-timg.min())
if len(timg.shape)==2:
formats='HW'
elif timg.shape[0]==3:
formats='CHW'
else:
formats='HWC'
log.add_image(tag,timg,step,dataformats=formats)
# Converts a string to bytes (for writing the string into a file). Provided for
# compatibility with Python 2 and 3.
def StrToBytes(text):
if sys.version_info[0] == 2:
return text
else:
return bytes(text, 'UTF-8')
# Outputs the given text and lets the user input a response (submitted by
# pressing the return key). Provided for compatibility with Python 2 and 3.
def GetUserInput(text):
if sys.version_info[0] == 2:
return raw_input(text)
else:
return input(text)
# Creates the given directory (hierarchy), which may already exist. Provided for
# compatibility with Python 2 and 3.
def MakeDirsExistOk(directory_path):
try:
os.makedirs(directory_path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
# Deletes all files and folders within the given folder.
def DeleteFolderContents(folder_path):
for file_name in os.listdir(folder_path):
file_path = os.path.join(folder_path, file_name)
try:
if os.path.isfile(file_path):
os.unlink(file_path)
else: #if os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
print('Exception in DeleteFolderContents():')
print(e)
print('Stack trace:')
print(traceback.format_exc())
# Creates the given directory, respectively deletes all content of the directory
# in case it already exists.
def MakeCleanDirectory(folder_path):
if os.path.isdir(folder_path):
DeleteFolderContents(folder_path)
else:
MakeDirsExistOk(folder_path)
# Downloads the given URL to a file in the given directory. Returns the
# path to the downloaded file.
# In part adapted from: https://stackoverflow.com/questions/22676
def DownloadFile(url, dest_dir_path):
file_name = url.split('/')[-1]
dest_file_path = os.path.join(dest_dir_path, file_name)
if os.path.isfile(dest_file_path):
print('The following file already exists:')
print(dest_file_path)
print('Please choose whether to re-download and overwrite the file [o] or to skip downloading this file [s] by entering o or s.')
while True:
response = GetUserInput("> ")
if response == 's':
return dest_file_path
elif response == 'o':
break
else:
print('Please enter o or s.')
url_object = None
if sys.version_info[0] == 2:
url_object = urllib2.urlopen(url)
else:
url_object = urllib.request.urlopen(url)
with open(dest_file_path, 'wb') as outfile:
meta = url_object.info()
file_size = 0
if sys.version_info[0] == 2:
file_size = int(meta.getheaders("Content-Length")[0])
else:
file_size = int(meta["Content-Length"])
print("Downloading: %s (size [bytes]: %s)" % (url, file_size))
file_size_downloaded = 0
block_size = 8192
while True:
buffer = url_object.read(block_size)
if not buffer:
break
file_size_downloaded += len(buffer)
outfile.write(buffer)
sys.stdout.write("%d / %d (%3f%%)\r" % (file_size_downloaded, file_size, file_size_downloaded * 100. / file_size))
sys.stdout.flush()
return dest_file_path
# Unzips the given zip file into the given directory.
def UnzipFile(file_path, unzip_dir_path, overwrite=True):
zip_ref = zipfile.ZipFile(open(file_path, 'rb'))
if not overwrite:
for f in zip_ref.namelist():
if not os.path.isfile(os.path.join(unzip_dir_path, f)):
zip_ref.extract(f, path=unzip_dir_path)
else:
print('Not overwriting {}'.format(f))
else:
zip_ref.extractall(unzip_dir_path)
zip_ref.close()
# Creates a zip file with the contents of the given directory.
# The archive_base_path must not include the extension .zip. The full, final
# path of the archive is returned by the function.
def ZipDirectory(archive_base_path, root_dir_path):
# return shutil.make_archive(archive_base_path, 'zip', root_dir_path) # THIS WILL ALWAYS HAVE ./ FOLDER INCLUDED
with zipfile.ZipFile(archive_base_path+'.zip', "w", compression=zipfile.ZIP_DEFLATED) as zf:
base_path = os.path.normpath(root_dir_path)
for dirpath, dirnames, filenames in os.walk(root_dir_path):
for name in sorted(dirnames):
path = os.path.normpath(os.path.join(dirpath, name))
zf.write(path, os.path.relpath(path, base_path))
for name in filenames:
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
zf.write(path, os.path.relpath(path, base_path))
return archive_base_path+'.zip'
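# A minimal round-trip sketch with temporary directories (hypothetical
# layout): archive one file with ZipDirectory, then restore it via UnzipFile.
def _demo_zip_roundtrip():
    import tempfile
    src, out = tempfile.mkdtemp(), tempfile.mkdtemp()
    with open(os.path.join(src, 'a.txt'), 'w') as f:
        f.write('hello')
    archive = ZipDirectory(os.path.join(out, 'demo'), src)
    UnzipFile(archive, out)
    assert os.path.isfile(os.path.join(out, 'a.txt'))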
# Downloads a zip file and directly unzips it.
def DownloadAndUnzipFile(url, archive_dir_path, unzip_dir_path, overwrite=True):
archive_path = DownloadFile(url, archive_dir_path)
UnzipFile(archive_path, unzip_dir_path, overwrite=overwrite)
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
|
banmo-main
|
third_party/vcnplus/flowutils/io.py
|
import math
import png
import struct
import array
import numpy as np
import cv2
import pdb
from io import *
UNKNOWN_FLOW_THRESH = 1e9
UNKNOWN_FLOW = 1e10
# Middlebury checks
TAG_STRING = 'PIEH' # use this when WRITING the file
TAG_FLOAT = 202021.25 # check for this when READING the file
def readPFM(file):
import re
file = open(file, 'rb')
color = None
width = None
height = None
scale = None
endian = None
header = file.readline().rstrip()
if header == b'PF':
color = True
elif header == b'Pf':
color = False
else:
raise Exception('Not a PFM file.')
    dim_match = re.match(rb'^(\d+)\s(\d+)\s$', file.readline())
if dim_match:
width, height = map(int, dim_match.groups())
else:
raise Exception('Malformed PFM header.')
scale = float(file.readline().rstrip())
if scale < 0: # little-endian
endian = '<'
scale = -scale
else:
endian = '>' # big-endian
data = np.fromfile(file, endian + 'f')
shape = (height, width, 3) if color else (height, width)
data = np.reshape(data, shape)
data = np.flipud(data)
return data, scale
def save_pfm(file, image, scale = 1):
import sys
color = None
if image.dtype.name != 'float32':
raise Exception('Image dtype must be float32.')
if len(image.shape) == 3 and image.shape[2] == 3: # color image
color = True
elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1: # greyscale
color = False
else:
raise Exception('Image must have H x W x 3, H x W x 1 or H x W dimensions.')
file.write('PF\n' if color else 'Pf\n')
file.write('%d %d\n' % (image.shape[1], image.shape[0]))
endian = image.dtype.byteorder
if endian == '<' or endian == '=' and sys.byteorder == 'little':
scale = -scale
file.write('%f\n' % scale)
image.tofile(file)
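# Round-trip sketch (hypothetical /tmp path): save_pfm takes a text-mode
# handle and float32 data, and readPFM undoes the bottom-up PFM storage
# with a vertical flip, which is why callers pass image[::-1] when saving.
def _demo_pfm_roundtrip():
    img = np.random.rand(6, 7).astype(np.float32)
    with open('/tmp/_demo.pfm', 'w') as f:
        save_pfm(f, img)
    back, scale = readPFM('/tmp/_demo.pfm')
    assert np.allclose(back, img[::-1])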
def ReadMiddleburyFloFile(path):
""" Read .FLO file as specified by Middlebury.
Returns tuple (width, height, u, v, mask), where u, v, mask are flat
arrays of values.
"""
with open(path, 'rb') as fil:
tag = struct.unpack('f', fil.read(4))[0]
width = struct.unpack('i', fil.read(4))[0]
height = struct.unpack('i', fil.read(4))[0]
assert tag == TAG_FLOAT
#data = np.fromfile(path, dtype=np.float, count=-1)
#data = data[3:]
fmt = 'f' * width*height*2
data = struct.unpack(fmt, fil.read(4*width*height*2))
u = data[::2]
v = data[1::2]
mask = map(lambda x,y: abs(x)<UNKNOWN_FLOW_THRESH and abs(y) < UNKNOWN_FLOW_THRESH, u, v)
mask = list(mask)
u_masked = map(lambda x,y: x if y else 0, u, mask)
v_masked = map(lambda x,y: x if y else 0, v, mask)
return width, height, list(u_masked), list(v_masked), list(mask)
def ReadKittiPngFile(path):
""" Read 16-bit .PNG file as specified by KITTI-2015 (flow).
Returns a tuple, (width, height, u, v, mask), where u, v, mask
are flat arrays of values.
"""
# Read .png file.
png_reader = png.Reader(path)
data = png_reader.read()
if data[3]['bitdepth'] != 16:
raise Exception('bitdepth of ' + path + ' is not 16')
width = data[0]
height = data[1]
# Get list of rows.
rows = list(data[2])
u = array.array('f', [0]) * width*height
v = array.array('f', [0]) * width*height
mask = array.array('f', [0]) * width*height
for y, row in enumerate(rows):
for x in range(width):
ind = width*y+x
u[ind] = (row[3*x] - 2**15) / 64.0
v[ind] = (row[3*x+1] - 2**15) / 64.0
mask[ind] = row[3*x+2]
# if mask[ind] > 0:
# print(u[ind], v[ind], mask[ind], row[3*x], row[3*x+1], row[3*x+2])
#png_reader.close()
return (width, height, u, v, mask)
def WriteMiddleburyFloFile(path, width, height, u, v, mask=None):
""" Write .FLO file as specified by Middlebury.
"""
if mask is not None:
u_masked = map(lambda x,y: x if y else UNKNOWN_FLOW, u, mask)
v_masked = map(lambda x,y: x if y else UNKNOWN_FLOW, v, mask)
else:
u_masked = u
v_masked = v
fmt = 'f' * width*height*2
# Interleave lists
data = [x for t in zip(u_masked,v_masked) for x in t]
with open(path, 'wb') as fil:
fil.write(str.encode(TAG_STRING))
fil.write(struct.pack('i', width))
fil.write(struct.pack('i', height))
fil.write(struct.pack(fmt, *data))
def write_flow(path,flow):
invalid_idx = (flow[:, :, 2] == 0)
flow[:, :, 0:2] = flow[:, :, 0:2]*64.+ 2 ** 15
flow[invalid_idx, 0] = 0
flow[invalid_idx, 1] = 0
flow = flow.astype(np.uint16)
flow = cv2.imwrite(path, flow[:,:,::-1])
#WriteKittiPngFile(path,
# flow.shape[1], flow.shape[0], flow[:,:,0].flatten(),
# flow[:,:,1].flatten(), flow[:,:,2].flatten())
def WriteKittiPngFile(path, width, height, u, v, mask=None):
""" Write 16-bit .PNG file as specified by KITTI-2015 (flow).
u, v are lists of float values
mask is a list of floats, denoting the *valid* pixels.
"""
data = array.array('H',[0])*width*height*3
for i,(u_,v_,mask_) in enumerate(zip(u,v,mask)):
data[3*i] = int(u_*64.0+2**15)
data[3*i+1] = int(v_*64.0+2**15)
data[3*i+2] = int(mask_)
# if mask_ > 0:
# print(data[3*i], data[3*i+1],data[3*i+2])
with open(path, 'wb') as png_file:
png_writer = png.Writer(width=width, height=height, bitdepth=16, compression=3, greyscale=False)
png_writer.write_array(png_file, data)
def ConvertMiddleburyFloToKittiPng(src_path, dest_path):
width, height, u, v, mask = ReadMiddleburyFloFile(src_path)
WriteKittiPngFile(dest_path, width, height, u, v, mask=mask)
def ConvertKittiPngToMiddleburyFlo(src_path, dest_path):
width, height, u, v, mask = ReadKittiPngFile(src_path)
WriteMiddleburyFloFile(dest_path, width, height, u, v, mask=mask)
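# Worked example of the KITTI 16-bit encoding (hypothetical /tmp path):
# each value is stored as uint16 = flow*64 + 2^15, i.e. a +/-512 px range
# at 1/64 px resolution, with the third plane marking valid pixels.
def _demo_kitti_roundtrip():
    u, v, mask = [1.5, -2.0], [0.25, 0.0], [1.0, 1.0]
    WriteKittiPngFile('/tmp/_demo.png', 2, 1, u, v, mask=mask)
    _, _, u2, v2, m2 = ReadKittiPngFile('/tmp/_demo.png')
    assert list(u2) == u and list(v2) == v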
def ParseFilenameKitti(filename):
# Parse kitti filename (seq_frameno.xx),
# return seq, frameno, ext.
# Be aware that seq might contain the dataset name (if contained as prefix)
ext = filename[filename.rfind('.'):]
frameno = filename[filename.rfind('_')+1:filename.rfind('.')]
frameno = int(frameno)
seq = filename[:filename.rfind('_')]
return seq, frameno, ext
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def load_calib_cam_to_cam(cam_to_cam_file):
# We'll return the camera calibration as a dictionary
data = {}
# Load and parse the cam-to-cam calibration data
filedata = read_calib_file(cam_to_cam_file)
# Create 3x4 projection matrices
P_rect_00 = np.reshape(filedata['P_rect_00'], (3, 4))
P_rect_10 = np.reshape(filedata['P_rect_01'], (3, 4))
P_rect_20 = np.reshape(filedata['P_rect_02'], (3, 4))
P_rect_30 = np.reshape(filedata['P_rect_03'], (3, 4))
# Compute the camera intrinsics
data['K_cam0'] = P_rect_00[0:3, 0:3]
data['K_cam1'] = P_rect_10[0:3, 0:3]
data['K_cam2'] = P_rect_20[0:3, 0:3]
data['K_cam3'] = P_rect_30[0:3, 0:3]
data['b00'] = P_rect_00[0, 3] / P_rect_00[0, 0]
data['b10'] = P_rect_10[0, 3] / P_rect_10[0, 0]
data['b20'] = P_rect_20[0, 3] / P_rect_20[0, 0]
data['b30'] = P_rect_30[0, 3] / P_rect_30[0, 0]
return data
|
banmo-main
|
third_party/vcnplus/flowutils/util_flow.py
|
banmo-main
|
third_party/vcnplus/flowutils/__init__.py
|
|
gpuid = 1
import pdb
import sys
import torch
import numpy as np
import cv2
def write_calib(K,bl,shape,maxd,path):
str1 = 'camera.A=[%f 0 %f; 0 %f %f; 0 0 1]'%(K[0,0], K[0,2], K[1,1],K[1,2])
str2 = 'camera.height=%d'%(shape[0])
str3 = 'camera.width=%d' %(shape[1])
str4 = 'camera.zmax=%f'%(maxd)
str5 = 'rho=%f'%(bl*K[0,0])
with open(path,'w') as f:
f.write('%s\n%s\n%s\n%s\n%s'%(str1,str2,str3,str4,str5))
def create_ade20k_label_colormap():
"""Creates a label colormap used in ADE20K segmentation benchmark.
Returns:
A colormap for visualizing segmentation results.
"""
return np.asarray([
[0, 0, 0],
[120, 120, 120],
[180, 120, 120],
[6, 230, 230],
[80, 50, 50],
[4, 200, 3],
[120, 120, 80],
[140, 140, 140],
[204, 5, 255],
[230, 230, 230],
[4, 250, 7],
[224, 5, 255],
[235, 255, 7],
[150, 5, 61],
[120, 120, 70],
[8, 255, 51],
[255, 6, 82],
[143, 255, 140],
[204, 255, 4],
[255, 51, 7],
[204, 70, 3],
[0, 102, 200],
[61, 230, 250],
[255, 6, 51],
[11, 102, 255],
[255, 7, 71],
[255, 9, 224],
[9, 7, 230],
[220, 220, 220],
[255, 9, 92],
[112, 9, 255],
[8, 255, 214],
[7, 255, 224],
[255, 184, 6],
[10, 255, 71],
[255, 41, 10],
[7, 255, 255],
[224, 255, 8],
[102, 8, 255],
[255, 61, 6],
[255, 194, 7],
[255, 122, 8],
[0, 255, 20],
[255, 8, 41],
[255, 5, 153],
[6, 51, 255],
[235, 12, 255],
[160, 150, 20],
[0, 163, 255],
[140, 140, 140],
[250, 10, 15],
[20, 255, 0],
[31, 255, 0],
[255, 31, 0],
[255, 224, 0],
[153, 255, 0],
[0, 0, 255],
[255, 71, 0],
[0, 235, 255],
[0, 173, 255],
[31, 0, 255],
[11, 200, 200],
[255, 82, 0],
[0, 255, 245],
[0, 61, 255],
[0, 255, 112],
[0, 255, 133],
[255, 0, 0],
[255, 163, 0],
[255, 102, 0],
[194, 255, 0],
[0, 143, 255],
[51, 255, 0],
[0, 82, 255],
[0, 255, 41],
[0, 255, 173],
[10, 0, 255],
[173, 255, 0],
[0, 255, 153],
[255, 92, 0],
[255, 0, 255],
[255, 0, 245],
[255, 0, 102],
[255, 173, 0],
[255, 0, 20],
[255, 184, 184],
[0, 31, 255],
[0, 255, 61],
[0, 71, 255],
[255, 0, 204],
[0, 255, 194],
[0, 255, 82],
[0, 10, 255],
[0, 112, 255],
[51, 0, 255],
[0, 194, 255],
[0, 122, 255],
[0, 255, 163],
[255, 153, 0],
[0, 255, 10],
[255, 112, 0],
[143, 255, 0],
[82, 0, 255],
[163, 255, 0],
[255, 235, 0],
[8, 184, 170],
[133, 0, 255],
[0, 255, 92],
[184, 0, 255],
[255, 0, 31],
[0, 184, 255],
[0, 214, 255],
[255, 0, 112],
[92, 255, 0],
[0, 224, 255],
[112, 224, 255],
[70, 184, 160],
[163, 0, 255],
[153, 0, 255],
[71, 255, 0],
[255, 0, 163],
[255, 204, 0],
[255, 0, 143],
[0, 255, 235],
[133, 255, 0],
[255, 0, 235],
[245, 0, 255],
[255, 0, 122],
[255, 245, 0],
[10, 190, 212],
[214, 255, 0],
[0, 204, 255],
[20, 0, 255],
[255, 255, 0],
[0, 153, 255],
[0, 41, 255],
[0, 255, 204],
[41, 0, 255],
[41, 255, 0],
[173, 0, 255],
[0, 245, 255],
[71, 0, 255],
[122, 0, 255],
[0, 255, 184],
[0, 92, 255],
[184, 255, 0],
[0, 133, 255],
[255, 214, 0],
[25, 194, 194],
[102, 255, 0],
[92, 0, 255],
])
def write_pfm(path, image, scale=1):
"""Write pfm file.
Args:
        path (str): path to file
image (array): data
scale (int, optional): Scale. Defaults to 1.
"""
with open(path, "wb") as file:
color = None
if image.dtype.name != "float32":
raise Exception("Image dtype must be float32.")
image = np.flipud(image)
if len(image.shape) == 3 and image.shape[2] == 3: # color image
color = True
elif (
len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1
): # greyscale
color = False
else:
raise Exception("Image must have H x W x 3, H x W x 1 or H x W dimensions.")
file.write("PF\n".encode() if color else "Pf\n".encode())
file.write("%d %d\n".encode() % (image.shape[1], image.shape[0]))
endian = image.dtype.byteorder
if endian == "<" or endian == "=" and sys.byteorder == "little":
scale = -scale
file.write("%f\n".encode() % scale)
image.tofile(file)
def triangulation(disp, xcoord, ycoord, bl=1, fl = 450, cx = 479.5, cy = 269.5):
mask = (disp<=0).flatten()
depth = bl*fl / (disp) # 450px->15mm focal length
X = (xcoord - cx) * depth / fl
Y = (ycoord - cy) * depth / fl
Z = depth
P = np.concatenate((X[np.newaxis],Y[np.newaxis],Z[np.newaxis]),0).reshape(3,-1)
P = np.concatenate((P,np.ones((1,P.shape[-1]))),0)
P[:,mask]=0
return P
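# Worked example: a pixel at the principal point with disparity 2 px,
# baseline 0.1 and focal length 450 px back-projects onto the optical
# axis at depth bl*fl/disp = 22.5.
def _demo_triangulation():
    P = triangulation(np.array([[2.0]]), np.array([[479.5]]),
                      np.array([[269.5]]), bl=0.1, fl=450)
    print(P[:3, 0])  # -> [0, 0, 22.5]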
def midpoint_triangulate(x, cam):
"""
Args:
x: Set of 2D points in homogeneous coords, (3 x n x N) matrix
cam: Collection of n objects, each containing member variables
cam.P - 3x4 camera matrix [0]
cam.R - 3x3 rotation matrix [1]
cam.T - 3x1 translation matrix [2]
Returns:
        midpoint: (3 x N) array of triangulated 3D points; the per-point ray directions are returned alongside
"""
n = len(cam) # No. of cameras
N = x.shape[-1]
I = np.eye(3) # 3x3 identity matrix
A = np.zeros((3,n))
B = np.zeros((3,n,N))
sigma2 = np.zeros((3,N))
for i in range(n):
a = -np.linalg.inv(cam[i][:3,:3]).dot(cam[i][:3,-1:]) # ith camera position #
A[:,i,None] = a
if i==0:
b = np.linalg.pinv(cam[i][:3,:3]).dot(x[:,i]) # Directional vector # 4, N
else:
b = np.linalg.pinv(cam[i]).dot(x[:,i]) # Directional vector # 4, N
b = b / b[3:]
b = b[:3,:] - a # 3,N
b = b / np.linalg.norm(b,2,0)[np.newaxis]
B[:,i,:] = b
sigma2 = sigma2 + b * (b.T.dot(a).reshape(-1,N)) # 3,N
Bo = B.transpose([2,0,1])
Bt = B.transpose([2,1,0])
Bo = torch.DoubleTensor(Bo)
Bt = torch.DoubleTensor(Bt)
A = torch.DoubleTensor(A)
sigma2 = torch.DoubleTensor(sigma2)
I = torch.DoubleTensor(I)
BoBt = torch.matmul(Bo, Bt)
C = (n * I)[np.newaxis] - BoBt# N,3,3
Cinv = C.inverse()
sigma1 = torch.sum(A, axis=1)[:,None]
m1 = I[np.newaxis] + torch.matmul(BoBt,Cinv)
m2 = torch.matmul(Cinv,sigma2.T[:,:,np.newaxis])
midpoint = (1/n) * torch.matmul(m1,sigma1[np.newaxis]) - m2
midpoint = np.asarray(midpoint)
return midpoint[:,:,0].T, np.asarray(Bo)
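# A minimal two-view sketch with identity intrinsics: cam0 at the origin,
# cam1 translated by (1,0,0); the rays through the observed pixels meet at
# the point (0,0,5) that generated them.
def _demo_midpoint_triangulate():
    cam0 = np.concatenate([np.eye(3), np.zeros((3, 1))], -1)
    cam1 = np.concatenate([np.eye(3), np.array([[-1.0], [0.0], [0.0]])], -1)
    x = np.array([[0.0, -0.2], [0.0, 0.0], [1.0, 1.0]])[:, :, None]  # 3 x 2 x 1
    X, _ = midpoint_triangulate(x, [cam0, cam1])
    print(X[:, 0])  # -> ~[0, 0, 5]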
def register_disp_fast(id_flow, id_mono, mask, inlier_th=0.01,niters=100):
"""
input: disp_flow, disp_mono, mask
output: inlier_mask, registered
register up-to-scale rough depth to motion-based depth
"""
shape = id_mono.shape
id_mono = id_mono.flatten()
disp_flow = id_flow[mask] # register to flow with mono
disp_mono = id_mono[mask]
num_samp = min(3000,len(disp_flow))
np.random.seed(0)
submask = np.random.choice(range(len(disp_flow)), num_samp)
disp_flow = disp_flow[submask]
disp_mono = disp_mono[submask]
n = len(disp_flow)
sample_size=niters
rand_idx = np.random.choice(range(n),sample_size)
scale_cand = (disp_flow/disp_mono)[rand_idx]
dis_cand = np.abs(np.log(disp_mono[:,np.newaxis]*scale_cand[np.newaxis])-np.log(disp_flow[:,np.newaxis]))
rank_metric = (dis_cand<inlier_th).sum(0)
scale_idx = np.argmax(rank_metric)
scale = scale_cand[scale_idx]
# # another way to align scale
# from scipy.optimize import minimize
# def cost_function(alpha, K):
# return np.mean(np.abs(alpha*K - 1))
#
# # MRE minimize
# output = minimize(cost_function, 1., args=(disp_mono/disp_flow),method='Nelder-Mead')
# if output.success:
# scale = output.x
dis = np.abs(np.log(disp_mono*scale)-np.log(disp_flow))
ninliers = (dis<inlier_th).sum()/n
registered_flow=(id_flow.reshape(shape))/scale
return registered_flow, scale, ninliers
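# Sketch: monocular disparity that is off by a global scale of 4 is
# recovered, with an inlier ratio of ~1.
def _demo_register_disp_fast():
    d = np.random.rand(4096) + 0.5
    _, scale, ninliers = register_disp_fast(d, d / 4.0, np.ones(4096, dtype=bool))
    print(scale, ninliers)  # -> ~4.0, ~1.0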
def testEss(K0,K1,R,T,p1,p2):
testP = cv2.triangulatePoints(K0.dot(np.concatenate( (np.eye(3),np.zeros((3,1))), -1)),
K1.dot(np.concatenate( (R,T), -1)),
p1[:2],p2[:2])
Z1 = testP[2,:]/testP[-1,:]
Z2 = (R.dot(Z1*np.linalg.inv(K0).dot(p1))+T)[-1,:]
if ((Z1>0).sum() > (Z1<=0).sum()) and ((Z2>0).sum() > (Z2<=0).sum()):
#print(Z1)
#print(Z2)
return True
else:
return False
def pose_estimate(K0,K1,hp0,hp1,strict_mask,rot,th=0.0001):
# # epipolar geometry
# from models.submodule import F_ngransac
# tmphp0 = hp0[:,strict_mask]
# tmphp1 = hp1[:,strict_mask]
# #num_samp = min(300000,tmphp0.shape[1])
# num_samp = min(30000,tmphp0.shape[1])
# #num_samp = min(3000,tmphp0.shape[1])
# submask = np.random.choice(range(tmphp0.shape[1]), num_samp)
# tmphp0 = tmphp0[:,submask]
# tmphp1 = tmphp1[:,submask]
#
# rotx,transx,Ex = F_ngransac(torch.Tensor(tmphp0.T[np.newaxis]).cuda(),
# torch.Tensor(tmphp1.T[np.newaxis]).cuda(),
# torch.Tensor(K0[np.newaxis]).cuda(),
# False,0,
# Kn = torch.Tensor(K1[np.newaxis]).cuda())
# R01 = cv2.Rodrigues(np.asarray(rotx[0]))[0]
# T01 = np.asarray(transx[0])
# E = np.asarray(Ex[0])
# _,R01,T01,_ = cv2.recoverPose(E.astype(float), tmphp0[:2].T, tmphp1[:2].T, K0) # RT are 0->1 points transform
# T01 = T01[:,0]
# R01=R01.T
# T01=-R01.dot(T01) # now are 1->0 points transform
E, maskk = cv2.findEssentialMat(np.linalg.inv(K0).dot(hp0[:,strict_mask])[:2].T,
np.linalg.inv(K1).dot(hp1[:,strict_mask])[:2].T, np.eye(3),
cv2.LMEDS,threshold=th)
valid_points = np.ones((strict_mask.sum())).astype(bool)
valid_points[~maskk[:,0].astype(bool)]=False
fmask = strict_mask.copy()
fmask[strict_mask]=valid_points
R1, R2, T = cv2.decomposeEssentialMat(E)
for rott in [(R1,T),(R2,T),(R1,-T),(R2,-T)]:
if testEss(K0,K1,rott[0],rott[1],hp0[:,fmask], hp1[:,fmask]):
R01=rott[0].T
T01=-R01.dot(rott[1][:,0])
if not 'T01' in locals():
T01 = np.asarray([0,0,1])
R01 = np.eye(3)
T01t = T01.copy()
# compensate R
H01 = K0.dot(R01).dot(np.linalg.inv(K1)) # plane at infinity
comp_hp1 = H01.dot(hp1)
comp_hp1 = comp_hp1/comp_hp1[-1:]
return R01,T01,H01,comp_hp1,E
def evaluate_tri(t10,R01,K0,K1,hp0,hp1,disp0,ent,bl,inlier_th=0.1,select_th=0.4, valid_mask=None):
if valid_mask is not None:
hp0 = hp0[:,valid_mask]
hp1 = hp1[:,valid_mask]
disp0 = disp0.flatten()[valid_mask]
ent = ent.flatten()[valid_mask]
# triangluation
#import time; beg = time.time()
cams = [K0.dot(np.concatenate( (np.eye(3),np.zeros((3,1))), -1)),
K1.dot(np.concatenate( (R01.T,-R01.T.dot(t10[:,np.newaxis])), -1)) ]
P_pred,_ = midpoint_triangulate( np.concatenate([hp0[:,np.newaxis],hp1[:,np.newaxis]],1),cams)
#print(1000*(time.time()-beg))
idepth_p3d = np.clip(K0[0,0]*bl/P_pred[2], 1e-6, np.inf)
# discard points with small disp
entmask = np.logical_and(idepth_p3d>1e-12, ~np.isinf(idepth_p3d))
entmask_tmp = entmask[entmask].copy()
entmask_tmp[np.argsort(-idepth_p3d[entmask])[entmask.sum()//2:]]=False # remove sky
entmask[entmask] = entmask_tmp
med = np.median(idepth_p3d[entmask])
entmask = np.logical_and(entmask, np.logical_and(idepth_p3d>med/5., idepth_p3d<med*5))
if entmask.sum()<10:
return None,None,None
registered_p3d,scale,ninliers = register_disp_fast(idepth_p3d, disp0, entmask,
inlier_th=inlier_th,niters=100)
print('size/inlier ratio: %d/%.2f'%(entmask.sum(),ninliers))
disp_ratio = np.abs(np.log(registered_p3d.flatten()/disp0.flatten()))
agree_mask = disp_ratio<np.log(select_th)
rank = np.argsort(disp_ratio)
return agree_mask,t10*scale,rank
def rb_fitting(bgmask_pred,mask_pred,idepth,flow,ent,K0,K1,bl,parallax_th=2,mono=True,sintel=False,tranpred=None,quatpred=None):
if sintel: parallax_th = parallax_th*0.25
# prepare data
shape = flow.shape[:2]
x0,y0=np.meshgrid(range(shape[1]),range(shape[0]))
x0=x0.astype(np.float32)
y0=y0.astype(np.float32)
x1=x0+flow[:,:,0]
y1=y0+flow[:,:,1]
hp0 = np.concatenate((x0[np.newaxis],y0[np.newaxis],np.ones(x1.shape)[np.newaxis]),0).reshape((3,-1))
hp1 = np.concatenate((x1[np.newaxis],y1[np.newaxis],np.ones(x1.shape)[np.newaxis]),0).reshape((3,-1))
# use bg + valid pixels to compute R/t
valid_mask = np.logical_and(bgmask_pred, ent<0).flatten()
R01,T01,H01,comp_hp1,E = pose_estimate(K0,K1,hp0,hp1,valid_mask,[0,0,0])
parallax = np.transpose((comp_hp1[:2]-hp0[:2]),[1,0]).reshape(x1.shape+(2,))
parallax_mag = np.linalg.norm(parallax[:,:,:2],2,2)
flow_mag = np.linalg.norm(flow[:,:,:2],2,2)
print('[BG Fitting] mean pp/flow: %.1f/%.1f px'%(parallax_mag[bgmask_pred].mean(), flow_mag[bgmask_pred].mean()))
reg_flow_P = triangulation(idepth, x0, y0, bl=bl, fl = K0[0,0], cx = K0[0,2], cy = K0[1,2])[:3]
if parallax_mag[bgmask_pred].mean()<parallax_th:
# static camera
print("static")
scene_type = 'H'
T01_c = [0,0,0]
else:
scene_type = 'F'
# determine scale of translation / reconstruction
aligned_mask,T01_c,ranked_p = evaluate_tri(T01,R01,K0,K1,hp0,hp1,idepth,ent,bl,inlier_th=0.01,select_th=1.2,valid_mask=valid_mask)
if not mono:
# PnP refine
aligned_mask[ranked_p[50000:]]=False
tmp = valid_mask.copy()
tmp[tmp] = aligned_mask
aligned_mask = tmp
_,rvec, T01=cv2.solvePnP(reg_flow_P.T[aligned_mask.flatten(),np.newaxis],
hp1[:2].T[aligned_mask.flatten(),np.newaxis], K0, 0,
flags=cv2.SOLVEPNP_DLS)
_,rvec, T01,=cv2.solvePnP(reg_flow_P.T[aligned_mask,np.newaxis],
hp1[:2].T[aligned_mask,np.newaxis], K0, 0,rvec, T01,useExtrinsicGuess=True,
flags=cv2.SOLVEPNP_ITERATIVE)
R01 = cv2.Rodrigues(rvec)[0].T
T01_c = -R01.dot(T01)[:,0]
RTs = []
for i in range(0,mask_pred.max()):
obj_mask = (mask_pred==i+1).flatten()
valid_mask = np.logical_and(obj_mask, ent.reshape(obj_mask.shape)<0)
if valid_mask.sum()<10 or (valid_mask.sum() / obj_mask.sum() < 0.3):
RT01 = None
else:
if tranpred is None:
R01x,T01_cx,_,comp_hp1,_ = pose_estimate(K0,K1,hp0,hp1,valid_mask,[0,0,0])
parallax = np.transpose((comp_hp1[:2]-hp0[:2]),[1,0])
parallax_mag = np.linalg.norm(parallax,2,-1)
center_coord = hp0[:,obj_mask].mean(-1)
print('[FG-%03d Fitting] center/mean pp/flow: (%d,%d)/%.1f/%.1f px'%(i,
center_coord[0], center_coord[1], parallax_mag[obj_mask].mean(),
flow_mag.flatten()[obj_mask].mean()))
if parallax_mag[obj_mask].mean()<parallax_th: RTs.append(None);continue
else:
R01x = quatpred[i].T
T01_cx = -quatpred[i].T.dot(tranpred[i][:,None])[:,0]
T01_cx = T01_cx / np.linalg.norm(T01_cx)
aligned_mask,T01_cx,ranked_p = evaluate_tri(T01_cx,R01x,K0,K1,hp0,hp1,idepth,ent,bl,inlier_th=0.01,select_th=1.2,valid_mask=valid_mask)
if T01_cx is None: RTs.append(None); continue
if not mono:
aligned_mask[ranked_p[50000:]]=False
tmp = valid_mask.copy()
tmp[tmp] = aligned_mask
obj_mask = tmp
if tranpred is None:
_,rvec, T01_cx=cv2.solvePnP(reg_flow_P.T[obj_mask,np.newaxis],
hp1[:2].T[obj_mask,np.newaxis], K0, 0,
flags=cv2.SOLVEPNP_DLS)
else:
rvec = cv2.Rodrigues(R01x.T)[0]
T01_cx = -R01x.T.dot(T01_cx[:,None])
_,rvec, T01_cx=cv2.solvePnP(reg_flow_P.T[obj_mask,np.newaxis],
hp1[:2].T[obj_mask,np.newaxis], K0, 0,rvec, T01_cx,useExtrinsicGuess=True,
flags=cv2.SOLVEPNP_ITERATIVE)
R01x = cv2.Rodrigues(rvec)[0].T
T01_cx = -R01x.dot(T01_cx)[:,0]
if T01_cx is None:
RT01=None
else:
RT01 = [R01x, T01_cx]
RTs.append(RT01)
return scene_type, T01_c, R01,RTs
def mod_flow(bgmask,mask_pred, idepth,disp1,flow,ent,bl,K0,K1,scene_type, T01_c,R01, RTs, segs_unc, oracle=None, mono=True,sintel=False):
# prepare data
idepth = idepth.copy()
flow = flow.copy()
shape = flow.shape[:2]
x0,y0=np.meshgrid(range(shape[1]),range(shape[0]))
x0=x0.astype(np.float32)
y0=y0.astype(np.float32)
x1=x0+flow[:,:,0]
y1=y0+flow[:,:,1]
hp0 = np.concatenate((x0[np.newaxis],y0[np.newaxis],np.ones(x1.shape)[np.newaxis]),0).reshape((3,-1))
hp1 = np.concatenate((x1[np.newaxis],y1[np.newaxis],np.ones(x1.shape)[np.newaxis]),0).reshape((3,-1))
reg_flow_P = triangulation(idepth, x0, y0, bl=bl, fl = K0[0,0], cx = K0[0,2], cy = K0[1,2])[:3]
# modify motion fields
if scene_type == 'H':
H,maskh = cv2.findHomography(hp0.T[ent.flatten()<0], hp1.T[ent.flatten()<0], cv2.FM_RANSAC,ransacReprojThreshold=5)
mod_mask = np.logical_and(bgmask,ent>0)
comp_hp0 = H.dot(hp0); comp_hp0 = comp_hp0/comp_hp0[-1:]
flow[mod_mask] = np.transpose((comp_hp0-hp0).reshape((3,)+shape), (1,2,0))[mod_mask]
elif scene_type == 'F':
mod_mask = bgmask
# modify disp0 | if monocular
if not (T01_c is None or np.isinf(np.linalg.norm(T01_c))):
print('[BG Update] cam trans mag: %.2f'%np.linalg.norm(T01_c))
if mono:
cams = [K0.dot(np.concatenate( (np.eye(3),np.zeros((3,1))), -1)),
K1.dot(np.concatenate( (R01.T,-R01.T.dot(T01_c[:,np.newaxis])), -1)) ]
pts = np.concatenate([hp0[:,np.newaxis,mod_mask.flatten()],
hp1[:,np.newaxis,mod_mask.flatten()]],1)
P_flow,cray = midpoint_triangulate(pts ,cams)
cflow = 1-(1/(1 + np.exp(-ent)) )
cmotion = 1-segs_unc
angle_th = 0.2
cangle = np.clip(np.arccos(np.abs(np.sum(cray[:,:,0] * cray[:,:,1],-1))) / np.pi * 180, 0,angle_th) # N,3,2
cangle = 1-np.power((cangle-angle_th)/angle_th,2)
cangle_tmp = np.zeros(shape)
cangle_tmp[mod_mask] = cangle
conf_depth = (cmotion*cflow*cangle_tmp)
lflow = (cmotion*cangle_tmp)
dcmask = np.logical_or(lflow[mod_mask]<0.25, P_flow[-1]<1e-12)
P_flow[:,dcmask] = reg_flow_P[:,mod_mask.flatten()][:,dcmask] # dont change
reg_flow_P[:,mod_mask.flatten()] = P_flow
# disp 1
reg_flow_PP = R01.T.dot(reg_flow_P)-R01.T.dot(T01_c)[:,np.newaxis]
hpp1 = K0.dot(reg_flow_PP)
hpp1 = hpp1/hpp1[-1:]
if not mono:
flow[mod_mask] = (hpp1 - hp0).T.reshape(shape+(3,))[mod_mask]
disp1[mod_mask] = bl*K0[0,0]/reg_flow_PP[-1].reshape(shape)[mod_mask]
# obj
for i in range(0,mask_pred.max()):
if sintel:break
obj_mask = mask_pred==i+1
if oracle is not None:
if (obj_mask).sum()>0:
# use midas depth
if np.median(idepth[obj_mask])==0: continue
reg_flow_P[2,obj_mask.flatten()] = bl*K0[0,0] / (np.median(oracle[obj_mask]) / np.median(idepth[obj_mask]) * idepth[obj_mask])
else:
if RTs[i] is not None:
mod_mask = obj_mask
T01_c_sub = RTs[i][1]
if not np.isinf(np.linalg.norm(T01_c_sub)):
R01_sub = RTs[i][0]
print('[FG-%03d Update] ins trans norm: %.2f'%(i,np.linalg.norm(T01_c_sub)))
if mono:
# mono replace
cams = [K0.dot(np.concatenate( (np.eye(3),np.zeros((3,1))), -1)),
K1.dot(np.concatenate( (R01_sub.T,-R01_sub.T.dot(T01_c_sub[:,np.newaxis])), -1)) ]
pts = np.concatenate([hp0[:,np.newaxis,mod_mask.flatten()],
hp1[:,np.newaxis,mod_mask.flatten()]],1)
P_flow,det = midpoint_triangulate(pts ,cams)
med = np.median(P_flow[2])
reg_flow_P[:,mod_mask.flatten()] = P_flow # modify disp0 | if monocular
print('[FG-%03d Update] size:%d/center:%.1f,%.1f/med:%.1f'%(i, P_flow.shape[1],pts[:,0].mean(-1)[0],pts[:,0].mean(-1)[1], med))
# disp 1
reg_flow_PP = R01_sub.T.dot(reg_flow_P)-R01_sub.T.dot(T01_c_sub)[:,np.newaxis]
hpp1 = K0.dot(reg_flow_PP)
hpp1 = hpp1/hpp1[-1:]
if not mono:
flow[mod_mask] = (hpp1 - hp0).T.reshape(shape+(3,))[mod_mask]
disp1[mod_mask] = bl*K0[0,0]/reg_flow_PP[-1].reshape(shape)[mod_mask]
idepth = bl*K0[0,0] / reg_flow_P[-1].reshape(shape)
return idepth,flow, disp1
def bilinear_interpolate(im, x, y):
x = np.asarray(x)
y = np.asarray(y)
x0 = np.floor(x).astype(int)
x1 = x0 + 1
y0 = np.floor(y).astype(int)
y1 = y0 + 1
x0 = np.clip(x0, 0, im.shape[1]-1);
x1 = np.clip(x1, 0, im.shape[1]-1);
y0 = np.clip(y0, 0, im.shape[0]-1);
y1 = np.clip(y1, 0, im.shape[0]-1);
Ia = im[ y0, x0 ]
Ib = im[ y1, x0 ]
Ic = im[ y0, x1 ]
Id = im[ y1, x1 ]
wa = (x1-x) * (y1-y)
wb = (x1-x) * (y-y0)
wc = (x-x0) * (y1-y)
wd = (x-x0) * (y-y0)
return wa*Ia + wb*Ib + wc*Ic + wd*Id
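# Worked example: sampling halfway between four pixels averages them.
def _demo_bilinear_interpolate():
    im = np.array([[0.0, 1.0], [2.0, 3.0]])
    print(bilinear_interpolate(im, 0.5, 0.5))  # -> 1.5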
def extract_trajectory(cams_gt):
# world matrix of the camera object: point from world to current frame
cam_traj_gt = []
for cam in cams_gt:
cam_pos_gt = cams_gt[0].dot(np.linalg.inv(cam))[:3,-1]
cam_traj_gt.append(cam_pos_gt)
cam_traj_gt = np.stack(cam_traj_gt)
return cam_traj_gt
def extract_delta(cams_gt):
# world matrix of the camera object: point from world to current frame
cam_traj_gt = [np.zeros(3)]
for i,cam in enumerate(cams_gt):
if i==0:continue
cam_traj_gt.append(cams_gt[i-1].dot(np.linalg.inv(cam))[:3,-1])
cam_traj_gt = np.stack(cam_traj_gt)
return cam_traj_gt
def warp_flow(img, flow):
h, w = flow.shape[:2]
flow = flow.copy().astype(np.float32)
flow[:,:,0] += np.arange(w)
flow[:,:,1] += np.arange(h)[:,np.newaxis]
res = cv2.remap(img, flow, None, cv2.INTER_LINEAR)
return res
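# Sketch: warp_flow samples img at (x+u, y+v), so a uniform (1, 0) px flow
# pulls every pixel from its right-hand neighbour (zeros past the border).
def _demo_warp_flow():
    img = np.arange(16, dtype=np.float32).reshape(4, 4)
    flow = np.zeros((4, 4, 2), dtype=np.float32)
    flow[..., 0] = 1.0
    print(warp_flow(img, flow))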
def lin_interp(shape, xyd):
    from scipy.interpolate import LinearNDInterpolator
# taken from https://github.com/hunse/kitti
m, n = shape
ij, d = xyd[:, 1::-1], xyd[:, 2]
f = LinearNDInterpolator(ij, d, fill_value=0)
J, I = np.meshgrid(np.arange(n), np.arange(m))
IJ = np.vstack([I.flatten(), J.flatten()]).T
disparity = f(IJ).reshape(shape)
return disparity
def colmap_cam_read(auxdir,framename):
K = np.eye(3)
with open(auxdir, 'r') as f:
lines = f.readlines()
if len(lines) == 4:
# shared intrinsics
_,_,_,_,fl, cx, cy, _ = lines[-1].split(' ')
K[0,0] = fl
K[1,1] = fl
K[0,2] = cx
K[1,2] = cy
return K
|
banmo-main
|
third_party/vcnplus/flowutils/dydepth.py
|
import pdb
import math
import numpy as np
import cv2
import torch
import torch.nn.functional as F
import torch.nn as nn
import kornia  # quaternion helpers used by rigid_transform / pose_reg / ctdet_decode
def gaussian2D(shape, sigma=1):
m, n = [(ss - 1.) / 2. for ss in shape]
y, x = np.ogrid[-m:m+1,-n:n+1]
h = np.exp(-(x * x + y * y) / (2 * sigma * sigma))
h[h < np.finfo(h.dtype).eps * h.max()] = 0
return h
def draw_umich_gaussian(heatmap, center, radius, k=1):
diameter = 2 * radius + 1
gaussian = gaussian2D((diameter, diameter), sigma=diameter / 6)
x, y = int(center[0]), int(center[1])
height, width = heatmap.shape[0:2]
left, right = min(x, radius), min(width - x, radius + 1)
top, bottom = min(y, radius), min(height - y, radius + 1)
masked_heatmap = heatmap[y - top:y + bottom, x - left:x + right]
masked_gaussian = gaussian[radius - top:radius + bottom, radius - left:radius + right]
if min(masked_gaussian.shape) > 0 and min(masked_heatmap.shape) > 0: # TODO debug
np.maximum(masked_heatmap, masked_gaussian * k, out=masked_heatmap)
return heatmap
def gaussian_radius(det_size, min_overlap=0.7):
height, width = det_size
a1 = 1
b1 = (height + width)
c1 = width * height * (1 - min_overlap) / (1 + min_overlap)
sq1 = np.sqrt(b1 ** 2 - 4 * a1 * c1)
r1 = (b1 + sq1) / 2
a2 = 4
b2 = 2 * (height + width)
c2 = (1 - min_overlap) * width * height
sq2 = np.sqrt(b2 ** 2 - 4 * a2 * c2)
r2 = (b2 + sq2) / 2
a3 = 4 * min_overlap
b3 = -2 * min_overlap * (height + width)
c3 = (min_overlap - 1) * width * height
sq3 = np.sqrt(b3 ** 2 - 4 * a3 * c3)
r3 = (b3 + sq3) / 2
return min(r1, r2, r3)
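# Worked example: a 24x24 box at min_overlap 0.7 gives a radius of ~6.6,
# which draw_umich_gaussian then uses to splat a peak of 1.0 at the centre.
def _demo_gaussian_radius():
    r = gaussian_radius((24, 24), min_overlap=0.7)
    heat = draw_umich_gaussian(np.zeros((64, 64), dtype=np.float32), (32, 32), int(r))
    print(r, heat.max())  # -> ~6.6, 1.0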
def _gather_feat(feat, ind, mask=None):
dim = feat.size(2)
ind = ind.unsqueeze(2).expand(ind.size(0), ind.size(1), dim)
feat = feat.gather(1, ind)
if mask is not None:
mask = mask.unsqueeze(2).expand_as(feat)
feat = feat[mask]
feat = feat.view(-1, dim)
return feat
def _transpose_and_gather_feat(feat, ind):
feat = feat.permute(0, 2, 3, 1).contiguous()
feat = feat.view(feat.size(0), -1, feat.size(3))
feat = _gather_feat(feat, ind)
return feat
def get_polarmask(mask):
# single mask
mask = np.asarray(mask.cpu()).astype(np.uint8)
contour, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) # cv 4.x
#_,contour, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) # cv 3.x
#contour = [i for i in contour if len(i)>50]
img = np.zeros(mask.shape+(3,))
#import pdb; pdb.set_trace()
img = cv2.drawContours(img, contour, -1, (0, 255, 0), 3)
#cv2.imwrite('/data/gengshay/3.png',mask)
#cv2.imwrite('/data/gengshay/4.png',img)
contour.sort(key=lambda x: cv2.contourArea(x), reverse=True) #only save the biggest one
'''debug IndexError: list index out of range'''
try:
count = contour[0][:, 0, :]
except:
pdb.set_trace()
try:
center = get_centerpoint(count)
except:
x,y = count.mean(axis=0)
center=[int(x), int(y)]
contour = contour[0]
contour = torch.Tensor(contour).float()
dists, coords = get_36_coordinates(center[0], center[1], contour)
return dists, np.asarray(center)
def get_centerpoint(lis):
area = 0.0
x, y = 0.0, 0.0
a = len(lis)
for i in range(a):
lat = lis[i][0]
lng = lis[i][1]
if i == 0:
lat1 = lis[-1][0]
lng1 = lis[-1][1]
else:
lat1 = lis[i - 1][0]
lng1 = lis[i - 1][1]
fg = (lat * lng1 - lng * lat1) / 2.0
area += fg
x += fg * (lat + lat1) / 3.0
y += fg * (lng + lng1) / 3.0
x = x / area
y = y / area
return [int(x), int(y)]
def get_36_coordinates(c_x, c_y, pos_mask_contour):
ct = pos_mask_contour[:, 0, :]
x = ct[:, 0] - c_x
y = ct[:, 1] - c_y
# angle = np.arctan2(x, y)*180/np.pi
angle = torch.atan2(x, y) * 180 / np.pi
angle[angle < 0] += 360
angle = angle.int()
# dist = np.sqrt(x ** 2 + y ** 2)
dist = torch.sqrt(x ** 2 + y ** 2)
angle, idx = torch.sort(angle)
dist = dist[idx]
new_coordinate = {}
for i in range(0, 360, 10):
if i in angle:
d = dist[angle==i].max()
new_coordinate[i] = d
elif i + 1 in angle:
d = dist[angle == i+1].max()
new_coordinate[i] = d
elif i - 1 in angle:
d = dist[angle == i-1].max()
new_coordinate[i] = d
elif i + 2 in angle:
d = dist[angle == i+2].max()
new_coordinate[i] = d
elif i - 2 in angle:
d = dist[angle == i-2].max()
new_coordinate[i] = d
elif i + 3 in angle:
d = dist[angle == i+3].max()
new_coordinate[i] = d
elif i - 3 in angle:
d = dist[angle == i-3].max()
new_coordinate[i] = d
distances = torch.zeros(36)
for a in range(0, 360, 10):
if not a in new_coordinate.keys():
new_coordinate[a] = torch.tensor(1e-6)
distances[a//10] = 1e-6
else:
distances[a//10] = new_coordinate[a]
# for idx in range(36):
# dist = new_coordinate[idx * 10]
# distances[idx] = dist
return distances, new_coordinate
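# Round-trip sketch on a synthetic mask (assumes an OpenCV build where
# findContours returns a mutable list, as the cv 4.x comment above expects):
# a filled circle of radius 20 px encodes to 36 polar distances of ~20.
def _demo_polar_encoding():
    circ = cv2.circle(np.zeros((64, 64), np.uint8), (32, 32), 20, 1, -1)
    dists, center = get_polarmask(torch.from_numpy(circ.astype(np.float32)))
    print(center, float(dists.mean()))  # -> ~(32, 32), ~20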
def polar_reg(output, mask, ind, target):
pred = _transpose_and_gather_feat(output, ind)
mask = mask.unsqueeze(2).expand_as(pred).float()
    loss = F.l1_loss(pred * mask, target * mask, reduction='sum')  # reduction='sum' replaces the deprecated size_average=False
loss = loss / (mask.sum() + 1e-4)
return loss,pred
def rigid_transform(p03d,p13d,quat, tran,mask):
mask = torch.Tensor(mask).cuda()
for it in range(mask.max().int()):
obj_mask = mask==(it+1)
# compute rigid transform
quatx = torch.nn.functional.normalize(quat[it],2,-1)
quatx = kornia.quaternion_to_rotation_matrix(quatx)
p13d[obj_mask] = quatx.matmul(p03d[obj_mask][:,:,None])[:,:,0]+tran[it]
return p03d,p13d
def pose_reg(quat, tran, pose_px_ind, ind, gt_p03d, gt_p13d, gt_depth, max_obj, p03d_feat,img):
# solve the scale
alpha = torch.ones(quat.shape[0]).cuda()
for i in range(quat.shape[0]):
d1 = p03d_feat[i,-1]
d2 = gt_p03d[i,-1].view(-1)
alpha[i] = (d1*d2).sum()/(d1*d1).sum()
#pdb.set_trace()
#from utils.fusion import pcwrite
#pc1 = np.asarray(p03d_feat[0].T.cpu())
#pc2 = np.asarray(gt_p03d[0].view(3,-1).T.cpu())
#pc1 = pc1*np.asarray(alpha[i].cpu())
#pcwrite('/data/gengshay/0.ply',np.concatenate([pc1,pc1],-1))
#pcwrite('/data/gengshay/1.ply',np.concatenate([pc2,pc2],-1))
alpha = alpha.detach()
vis = torch.zeros_like(gt_depth)
quat = _transpose_and_gather_feat(quat, ind).view(-1,4)
tran = _transpose_and_gather_feat(tran, ind).view(-1,3)
gt_p03d = gt_p03d.permute(0,2,3,1)
gt_p13d = gt_p13d.permute(0,2,3,1)
gt_depth = gt_depth.permute(0,2,3,1)
loss = []
for it,obj_mask in enumerate(pose_px_ind):
imgid = it//max_obj
if len(obj_mask)>0:
p03d = gt_p03d[imgid][obj_mask]
p13d = gt_p13d[imgid][obj_mask]
depth =gt_depth[imgid][obj_mask]
# compute rigid transform
quatx = torch.nn.functional.normalize(quat[it],2,-1)
quatx = kornia.quaternion_to_rotation_matrix(quatx)
pred_p13d = quatx.matmul(p03d[:,:,None])[:,:,0]+tran[it] * alpha[imgid]
#pdb.set_trace()
#from utils.fusion import pcwrite
#pc1 = np.asarray(p03d.cpu())
#pc2 = np.asarray(pred_p13d.detach().cpu())
#pc3 = np.asarray(p13d.cpu())
#rgb = img[imgid][obj_mask].cpu()*255
#pcwrite('/data/gengshay/0.ply',np.concatenate([pc1,rgb],-1))
#pcwrite('/data/gengshay/1.ply',np.concatenate([pc2,rgb],-1))
#pcwrite('/data/gengshay/2.ply',np.concatenate([pc3,rgb],-1))
sub_loss = ((p13d - pred_p13d)/depth).abs()
loss.append( sub_loss.mean() )
# vis
sub_vis = torch.zeros_like(vis[0,0])
sub_vis[obj_mask] = sub_loss.mean(-1)
vis[imgid,0] += sub_vis
if len(loss)>0:
loss = torch.stack(loss).mean()
else:
loss = 0
return loss, vis
def distance2mask(points, distances, angles, max_shape=None):
    '''Decode distance predictions to 36 contour points.
    Args:
        points (Tensor): Shape (n, 2), [x, y] centers.
        distances (Tensor): Shape (n, 36), distance from each center to the
            contour along 36 rays (angles 0 to 350 degrees in 10-degree steps).
        angles (Tensor): Shape (36,), the ray angles in radians.
        max_shape (tuple): Shape of the image, used to clamp decoded points.
    Returns:
        Tensor: Decoded contour points, shape (n, 2, 36).
    '''
num_points = points.shape[0]
points = points[:, :, None].repeat(1, 1, 36)
c_x, c_y = points[:, 0], points[:, 1]
sin = torch.sin(angles)
cos = torch.cos(angles)
sin = sin[None, :].repeat(num_points, 1)
cos = cos[None, :].repeat(num_points, 1)
x = distances * sin + c_x
y = distances * cos + c_y
if max_shape is not None:
x = x.clamp(min=0, max=max_shape[1] - 1)
y = y.clamp(min=0, max=max_shape[0] - 1)
res = torch.cat([x[:, None, :], y[:, None, :]], dim=1)
return res
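# Added usage sketch (not part of the original file): decodes a single toy
# center into a 36-point contour. The center location and the constant ray
# distances below are made-up inputs, not real network output; torch and math
# are assumed to be imported at the top of this file.
def _demo_distance2mask():
    points = torch.tensor([[64.0, 64.0]])                 # (n, 2) centers, [x, y]
    distances = torch.full((1, 36), 10.0)                 # 10 px along every ray
    angles = torch.arange(0, 360, 10).float() / 180 * math.pi
    contour = distance2mask(points, distances, angles, max_shape=(128, 128))
    assert contour.shape == (1, 2, 36)                    # x/y for each of 36 rays
    return contour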
def ctdet_decode(heat, wh, reg=None, cat_spec_wh=False, K=100, quat=None, tran=None, p03d=None):
batch, cat, height, width = heat.size()
# heat = torch.sigmoid(heat)
# perform nms on heatmaps
heat = _nms(heat)
scores, inds, clses, ys, xs = _topk(heat, K=K)
if reg is not None:
reg = _transpose_and_gather_feat(reg, inds)
reg = reg.view(batch, K, 2)
xs = xs.view(batch, K, 1) + reg[:, :, 0:1]
ys = ys.view(batch, K, 1) + reg[:, :, 1:2]
else:
xs = xs.view(batch, K, 1)
ys = ys.view(batch, K, 1)
scores = scores.view(batch, K, 1)
pdist_ct = torch.cat([xs,ys],-1)
pdist_ind=(ys*width+xs).long()
pdist_pred = _transpose_and_gather_feat(wh, pdist_ind[:,:,0])
if quat is not None:
quat_pred = _transpose_and_gather_feat(quat, pdist_ind[:,:,0])
tran_pred = _transpose_and_gather_feat(tran, pdist_ind[:,:,0])
pdist_mask = (scores>0.1)[:,:,0]
contour_pred = np.zeros(wh.shape[2:])
mask_pred = np.zeros(wh.shape[2:])
    angles = torch.arange(0, 360, 10).float().cuda() / 180 * math.pi
bboxs = np.zeros((0,4))
p03d = p03d[0].permute(1,2,0)
p13d = p03d.clone()
if pdist_mask.sum()>0:
contour = distance2mask(pdist_ct[0][pdist_mask[0]], pdist_pred[0][pdist_mask[0]], angles, wh.shape[2:])
contour = np.asarray(contour.permute(0,2,1).cpu()[:,:,None],dtype=int)
contour_pred = cv2.drawContours(contour_pred, contour, -1,1,3)
mask_pred,bboxs = draw_masks(mask_pred, np.asarray(pdist_ct[0][pdist_mask[0]].cpu()), contour)
#pdb.set_trace()
if quat is not None:
quat_pred = quat_pred[0][pdist_mask[0]]
tran_pred = tran_pred[0][pdist_mask[0]]
#p03d,p13d = rigid_transform(p03d,p13d,quat_pred,tran_pred, mask_pred)
pred = np.concatenate([contour_pred, mask_pred],0)
rt = {}
rt['mask'] = pred
scores = np.asarray(scores[scores>0.1].cpu())
rt['bbox'] = np.concatenate([bboxs.reshape((-1,4)), scores[:,None]],-1)
if quat is not None:
rt['quat'] = np.asarray(kornia.quaternion_to_rotation_matrix(quat_pred).cpu())
rt['tran'] = np.asarray(tran_pred.cpu())
#rt['p03d'] = np.asarray(p03d.cpu())
#rt['p13d'] = np.asarray(p13d.cpu())
return rt
def label_colormap():
"""Creates a label colormap used in CITYSCAPES segmentation benchmark.
Returns:
A colormap for visualizing segmentation results.
"""
colormap = np.zeros((256, 3), dtype=np.uint8)
colormap[0] = [128, 64, 128]
colormap[1] = [255, 0, 0]
colormap[2] = [0, 255, 0]
colormap[3] = [250, 250, 0]
colormap[4] = [0, 215, 230]
colormap[5] = [190, 153, 153]
colormap[6] = [250, 170, 30]
colormap[7] = [102, 102, 156]
colormap[8] = [107, 142, 35]
colormap[9] = [152, 251, 152]
colormap[10] = [70, 130, 180]
colormap[11] = [220, 20, 60]
colormap[12] = [0, 0, 230]
colormap[13] = [0, 0, 142]
colormap[14] = [0, 0, 70]
colormap[15] = [0, 60, 100]
colormap[16] = [0, 80, 100]
colormap[17] = [244, 35, 232]
colormap[18] = [119, 11, 32]
return colormap
def draw_masks(mask, ct, contour):
colormap = label_colormap()
bboxs = []
for i in np.argsort(ct[:,1]):
mask = cv2.drawContours(mask, contour[i:i+1], -1,float(i+1),-1) # x,y
bboxs.append(np.hstack( (contour[i,:,0].min(0), contour[i,:,0].max(0)) )[None])
#cv2.imwrite('/data/gengshay/0.png',mask)
return mask, np.concatenate(bboxs,0)
def _topk(scores, K=40):
batch, cat, height, width = scores.size()
topk_scores, topk_inds = torch.topk(scores.view(batch, cat, -1), K)
topk_inds = topk_inds % (height * width)
    topk_ys = torch.div(topk_inds, width, rounding_mode='floor').float()
topk_xs = (topk_inds % width).int().float()
topk_score, topk_ind = torch.topk(topk_scores.view(batch, -1), K)
    topk_clses = torch.div(topk_ind, K, rounding_mode='floor').int()
topk_inds = _gather_feat(
topk_inds.view(batch, -1, 1), topk_ind).view(batch, K)
topk_ys = _gather_feat(topk_ys.view(batch, -1, 1), topk_ind).view(batch, K)
topk_xs = _gather_feat(topk_xs.view(batch, -1, 1), topk_ind).view(batch, K)
return topk_score, topk_inds, topk_clses, topk_ys, topk_xs
def _nms(heat, kernel=3):
pad = (kernel - 1) // 2
hmax = nn.functional.max_pool2d(
heat, (kernel, kernel), stride=1, padding=pad)
keep = (hmax == heat).float()
return heat * keep
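# Added sketch (illustration only): _nms keeps a heatmap value only where it
# equals the max of its 3x3 neighbourhood, i.e. at local peaks. The tiny
# tensor below is a made-up example.
def _demo_nms():
    heat = torch.zeros(1, 1, 5, 5)
    heat[0, 0, 2, 2] = 1.0        # local peak, survives
    heat[0, 0, 2, 3] = 0.8        # suppressed: its neighbourhood contains 1.0
    kept = _nms(heat)
    assert kept[0, 0, 2, 2] == 1.0 and kept[0, 0, 2, 3] == 0.0
    return kept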
|
banmo-main
|
third_party/vcnplus/flowutils/detlib.py
|
#! /usr/bin/env python3
"""
I/O script to save and load the data coming with the MPI-Sintel low-level
computer vision benchmark.
For more details about the benchmark, please visit www.mpi-sintel.de
CHANGELOG:
v1.0 (2015/02/03): First release
Copyright (c) 2015 Jonas Wulff
Max Planck Institute for Intelligent Systems, Tuebingen, Germany
"""
# Requirements: Numpy and PIL/Pillow
import numpy as np
from PIL import Image
# Check for endianness, based on Daniel Scharstein's optical flow code.
# Using little-endian architecture, these two should be equal.
TAG_FLOAT = 202021.25
TAG_CHAR = b'PIEH'  # stored as bytes: the files are opened in binary mode, and writing a str would fail on Python 3
def flow_read(filename):
""" Read optical flow from file, return (U,V) tuple.
Original code by Deqing Sun, adapted from Daniel Scharstein.
"""
f = open(filename,'rb')
check = np.fromfile(f,dtype=np.float32,count=1)[0]
assert check == TAG_FLOAT, ' flow_read:: Wrong tag in flow file (should be: {0}, is: {1}). Big-endian machine? '.format(TAG_FLOAT,check)
width = np.fromfile(f,dtype=np.int32,count=1)[0]
height = np.fromfile(f,dtype=np.int32,count=1)[0]
size = width*height
assert width > 0 and height > 0 and size > 1 and size < 100000000, ' flow_read:: Wrong input size (width = {0}, height = {1}).'.format(width,height)
tmp = np.fromfile(f,dtype=np.float32,count=-1).reshape((height,width*2))
u = tmp[:,np.arange(width)*2]
v = tmp[:,np.arange(width)*2 + 1]
return u,v
def flow_write(filename,uv,v=None):
""" Write optical flow to file.
If v is None, uv is assumed to contain both u and v channels,
stacked in depth.
Original code by Deqing Sun, adapted from Daniel Scharstein.
"""
nBands = 2
if v is None:
assert(uv.ndim == 3)
assert(uv.shape[2] == 2)
u = uv[:,:,0]
v = uv[:,:,1]
else:
u = uv
assert(u.shape == v.shape)
height,width = u.shape
f = open(filename,'wb')
# write the header
f.write(TAG_CHAR)
np.array(width).astype(np.int32).tofile(f)
np.array(height).astype(np.int32).tofile(f)
# arrange into matrix form
tmp = np.zeros((height, width*nBands))
tmp[:,np.arange(width)*2] = u
tmp[:,np.arange(width)*2 + 1] = v
tmp.astype(np.float32).tofile(f)
f.close()
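# Added round-trip sketch (illustration only): flow values are stored as raw
# float32, so a write followed by a read is lossless. The temporary path is
# an assumption of this demo.
def _demo_flow_roundtrip():
    import os
    import tempfile
    u = np.random.rand(4, 6).astype(np.float32)
    v = np.random.rand(4, 6).astype(np.float32)
    path = os.path.join(tempfile.mkdtemp(), 'demo.flo')
    flow_write(path, u, v)
    u2, v2 = flow_read(path)
    assert np.allclose(u, u2) and np.allclose(v, v2)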
def depth_read(filename):
""" Read depth data from file, return as numpy array. """
f = open(filename,'rb')
check = np.fromfile(f,dtype=np.float32,count=1)[0]
assert check == TAG_FLOAT, ' depth_read:: Wrong tag in flow file (should be: {0}, is: {1}). Big-endian machine? '.format(TAG_FLOAT,check)
width = np.fromfile(f,dtype=np.int32,count=1)[0]
height = np.fromfile(f,dtype=np.int32,count=1)[0]
size = width*height
assert width > 0 and height > 0 and size > 1 and size < 100000000, ' depth_read:: Wrong input size (width = {0}, height = {1}).'.format(width,height)
depth = np.fromfile(f,dtype=np.float32,count=-1).reshape((height,width))
return depth
def depth_write(filename, depth):
""" Write depth to file. """
height,width = depth.shape[:2]
f = open(filename,'wb')
# write the header
f.write(TAG_CHAR)
np.array(width).astype(np.int32).tofile(f)
np.array(height).astype(np.int32).tofile(f)
depth.astype(np.float32).tofile(f)
f.close()
def disparity_write(filename,disparity,bitdepth=16):
""" Write disparity to file.
bitdepth can be either 16 (default) or 32.
The maximum disparity is 1024, since the image width in Sintel
is 1024.
"""
d = disparity.copy()
# Clip disparity.
d[d>1024] = 1024
d[d<0] = 0
d_r = (d / 4.0).astype('uint8')
d_g = ((d * (2.0**6)) % 256).astype('uint8')
out = np.zeros((d.shape[0],d.shape[1],3),dtype='uint8')
out[:,:,0] = d_r
out[:,:,1] = d_g
if bitdepth > 16:
d_b = (d * (2**14) % 256).astype('uint8')
out[:,:,2] = d_b
Image.fromarray(out,'RGB').save(filename,'PNG')
def disparity_read(filename):
""" Return disparity read from filename. """
f_in = np.array(Image.open(filename))
d_r = f_in[:,:,0].astype('float64')
d_g = f_in[:,:,1].astype('float64')
d_b = f_in[:,:,2].astype('float64')
depth = d_r * 4 + d_g / (2**6) + d_b / (2**14)
return depth
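# Added sketch (illustration only): the default 16-bit PNG packing quantises
# disparity to 1/64 px (R holds d/4, G holds the fractional part), so a
# write/read round trip agrees to that tolerance.
def _demo_disparity_roundtrip():
    import os
    import tempfile
    d = np.random.rand(8, 8) * 100.0
    path = os.path.join(tempfile.mkdtemp(), 'demo_disp.png')
    disparity_write(path, d)                # default bitdepth=16
    d2 = disparity_read(path)
    assert np.abs(d - d2).max() <= 1.0 / 64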
#def cam_read(filename):
# """ Read camera data, return (M,N) tuple.
#
# M is the intrinsic matrix, N is the extrinsic matrix, so that
#
# x = M*N*X,
# with x being a point in homogeneous image pixel coordinates, X being a
# point in homogeneous world coordinates.
# """
# txtdata = np.loadtxt(filename)
# intrinsic = txtdata[0,:9].reshape((3,3))
#    extrinsic = txtdata[1,:12].reshape((3,4))
# return intrinsic,extrinsic
#
#
#def cam_write(filename,M,N):
# """ Write intrinsic matrix M and extrinsic matrix N to file. """
# Z = np.zeros((2,12))
# Z[0,:9] = M.ravel()
# Z[1,:12] = N.ravel()
# np.savetxt(filename,Z)
def cam_read(filename):
""" Read camera data, return (M,N) tuple.
M is the intrinsic matrix, N is the extrinsic matrix, so that
x = M*N*X,
with x being a point in homogeneous image pixel coordinates, X being a
point in homogeneous world coordinates.
"""
f = open(filename,'rb')
check = np.fromfile(f,dtype=np.float32,count=1)[0]
assert check == TAG_FLOAT, ' cam_read:: Wrong tag in flow file (should be: {0}, is: {1}). Big-endian machine? '.format(TAG_FLOAT,check)
M = np.fromfile(f,dtype='float64',count=9).reshape((3,3))
N = np.fromfile(f,dtype='float64',count=12).reshape((3,4))
return M,N
def cam_write(filename, M, N):
""" Write intrinsic matrix M and extrinsic matrix N to file. """
f = open(filename,'wb')
# write the header
f.write(TAG_CHAR)
M.astype('float64').tofile(f)
N.astype('float64').tofile(f)
f.close()
def segmentation_write(filename,segmentation):
""" Write segmentation to file. """
segmentation_ = segmentation.astype('int32')
seg_r = np.floor(segmentation_ / (256**2)).astype('uint8')
seg_g = np.floor((segmentation_ % (256**2)) / 256).astype('uint8')
seg_b = np.floor(segmentation_ % 256).astype('uint8')
out = np.zeros((segmentation.shape[0],segmentation.shape[1],3),dtype='uint8')
out[:,:,0] = seg_r
out[:,:,1] = seg_g
out[:,:,2] = seg_b
Image.fromarray(out,'RGB').save(filename,'PNG')
def segmentation_read(filename):
""" Return disparity read from filename. """
f_in = np.array(Image.open(filename))
seg_r = f_in[:,:,0].astype('int32')
seg_g = f_in[:,:,1].astype('int32')
seg_b = f_in[:,:,2].astype('int32')
segmentation = (seg_r * 256 + seg_g) * 256 + seg_b
return segmentation
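# Added sketch (illustration only): integer ids below 256**3 survive the
# three-channel PNG packing exactly.
def _demo_segmentation_roundtrip():
    import os
    import tempfile
    seg = np.random.randint(0, 70000, size=(8, 8))
    path = os.path.join(tempfile.mkdtemp(), 'demo_seg.png')
    segmentation_write(path, seg)
    assert np.array_equal(segmentation_read(path), seg)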
|
banmo-main
|
third_party/vcnplus/flowutils/sintel_io.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torchvision.models as models
import torch
import torch.nn as nn
import os
from .networks.msra_resnet import get_pose_net
from .networks.dlav0 import get_pose_net as get_dlav0
from .networks.pose_dla_dcn import get_pose_net as get_dla_dcn
from .networks.resnet_dcn import get_pose_net as get_pose_net_dcn
from .networks.large_hourglass import get_large_hourglass_net
_model_factory = {
'res': get_pose_net, # default Resnet with deconv
'dlav0': get_dlav0, # default DLAup
'dla': get_dla_dcn,
'resdcn': get_pose_net_dcn,
'hourglass': get_large_hourglass_net,
}
def create_model(arch, heads, head_conv,num_input):
num_layers = int(arch[arch.find('_') + 1:]) if '_' in arch else 0
arch = arch[:arch.find('_')] if '_' in arch else arch
get_model = _model_factory[arch]
model = get_model(num_layers=num_layers, heads=heads, head_conv=head_conv,num_input=num_input)
return model
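# Added usage sketch (illustration only): builds a plain ResNet-18 detector.
# The head names and channel counts follow the CenterNet convention but are
# assumptions of this demo, not values prescribed by this file.
def _demo_create_model():
    heads = {'hm': 1, 'wh': 36, 'reg': 2}   # heatmap, 36-ray distances, offset
    return create_model('res_18', heads, head_conv=256, num_input=3)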
def load_model(model, model_path, optimizer=None, resume=False,
lr=None, lr_step=None):
start_epoch = 0
checkpoint = torch.load(model_path, map_location=lambda storage, loc: storage)
print('loaded {}, epoch {}'.format(model_path, checkpoint['epoch']))
state_dict_ = checkpoint['state_dict']
state_dict = {}
# convert data_parallal to model
for k in state_dict_:
if k.startswith('module') and not k.startswith('module_list'):
state_dict[k[7:]] = state_dict_[k]
else:
state_dict[k] = state_dict_[k]
model_state_dict = model.state_dict()
# check loaded parameters and created model parameters
msg = 'If you see this, your model does not fully load the ' + \
'pre-trained weight. Please make sure ' + \
'you have correctly specified --arch xxx ' + \
'or set the correct --num_classes for your own dataset.'
for k in state_dict:
if k in model_state_dict:
if state_dict[k].shape != model_state_dict[k].shape:
                print('Skip loading parameter {}, required shape {}, '
                      'loaded shape {}. {}'.format(
                          k, model_state_dict[k].shape, state_dict[k].shape, msg))
state_dict[k] = model_state_dict[k]
else:
print('Drop parameter {}.'.format(k) + msg)
for k in model_state_dict:
if not (k in state_dict):
print('No param {}.'.format(k) + msg)
state_dict[k] = model_state_dict[k]
model.load_state_dict(state_dict, strict=False)
# resume optimizer parameters
if optimizer is not None and resume:
if 'optimizer' in checkpoint:
optimizer.load_state_dict(checkpoint['optimizer'])
start_epoch = checkpoint['epoch']
start_lr = lr
for step in lr_step:
if start_epoch >= step:
start_lr *= 0.1
for param_group in optimizer.param_groups:
param_group['lr'] = start_lr
print('Resumed optimizer with start lr', start_lr)
else:
print('No optimizer parameters in checkpoint.')
if optimizer is not None:
return model, optimizer, start_epoch
else:
return model
def save_model(path, epoch, model, optimizer=None):
if isinstance(model, torch.nn.DataParallel):
state_dict = model.module.state_dict()
else:
state_dict = model.state_dict()
data = {'epoch': epoch,
'state_dict': state_dict}
if not (optimizer is None):
data['optimizer'] = optimizer.state_dict()
torch.save(data, path)
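# Added sketch (illustration only): a save/load round trip with a stand-in
# module; real use would pass the detector built by create_model instead.
def _demo_checkpoint_roundtrip():
    import os
    import tempfile
    net = torch.nn.Linear(4, 2)
    path = os.path.join(tempfile.mkdtemp(), 'demo.pth')
    save_model(path, epoch=3, model=net)
    return load_model(net, path)            # prints 'loaded ..., epoch 3'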
|
banmo-main
|
third_party/vcnplus/models/det.py
|
# ------------------------------------------------------------------------------
# Portions of this code are from
# CornerNet (https://github.com/princeton-vl/CornerNet)
# Copyright (c) 2018, University of Michigan
# Licensed under the BSD 3-Clause License
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pdb
import torch
import torch.nn as nn
from .det_utils import _transpose_and_gather_feat
import torch.nn.functional as F
def _slow_neg_loss(pred, gt):
'''focal loss from CornerNet'''
pos_inds = gt.eq(1)
neg_inds = gt.lt(1)
neg_weights = torch.pow(1 - gt[neg_inds], 4)
loss = 0
pos_pred = pred[pos_inds]
neg_pred = pred[neg_inds]
pos_loss = torch.log(pos_pred) * torch.pow(1 - pos_pred, 2)
neg_loss = torch.log(1 - neg_pred) * torch.pow(neg_pred, 2) * neg_weights
num_pos = pos_inds.float().sum()
pos_loss = pos_loss.sum()
neg_loss = neg_loss.sum()
if pos_pred.nelement() == 0:
loss = loss - neg_loss
else:
loss = loss - (pos_loss + neg_loss) / num_pos
return loss
def _neg_loss(pred, gt, heat_logits):
    ''' Modified focal loss, adapted from the CornerNet version: it consumes
    raw two-channel logits and uses log_softmax for numerical stability.
    Runs faster than the slow version and costs a little more memory.
    Arguments:
        pred (batch x c x h x w): foreground probabilities
        gt (batch x c x h x w): ground-truth Gaussian heatmap
        heat_logits (batch x 2 x h x w): foreground/background logits
    '''
pos_inds = gt.eq(1).float()
neg_inds = gt.lt(1).float()
neg_weights = torch.pow(1 - gt, 4)
loss = 0
logpred = torch.nn.functional.log_softmax(heat_logits,1)
pos_loss = logpred[:,0:1] * torch.pow(1 - pred, 2) * pos_inds
neg_loss = logpred[:,1:2] * torch.pow(pred, 2) * neg_weights * neg_inds
num_pos = pos_inds.float().sum()
pos_loss = pos_loss.sum()
neg_loss = neg_loss.sum()
if num_pos == 0:
loss = loss - neg_loss
else:
loss = loss - (pos_loss + neg_loss) / num_pos
return loss
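# Added toy example (illustration only): all tensors are made up to show the
# expected shapes; pred is the foreground channel of the softmax-ed logits.
def _demo_neg_loss():
    logits = torch.randn(2, 2, 4, 4)              # (batch, 2, h, w)
    pred = F.softmax(logits, dim=1)[:, 0:1]       # foreground probability
    gt = torch.zeros(2, 1, 4, 4)
    gt[0, 0, 1, 1] = 1.0                          # one positive center
    return _neg_loss(pred, gt, logits)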
def _not_faster_neg_loss(pred, gt):
pos_inds = gt.eq(1).float()
neg_inds = gt.lt(1).float()
num_pos = pos_inds.float().sum()
neg_weights = torch.pow(1 - gt, 4)
loss = 0
trans_pred = pred * neg_inds + (1 - pred) * pos_inds
weight = neg_weights * neg_inds + pos_inds
all_loss = torch.log(1 - trans_pred) * torch.pow(trans_pred, 2) * weight
all_loss = all_loss.sum()
if num_pos > 0:
all_loss /= num_pos
loss -= all_loss
return loss
def _slow_reg_loss(regr, gt_regr, mask):
num = mask.float().sum()
mask = mask.unsqueeze(2).expand_as(gt_regr)
regr = regr[mask]
gt_regr = gt_regr[mask]
    regr_loss = nn.functional.smooth_l1_loss(regr, gt_regr, reduction='sum')
regr_loss = regr_loss / (num + 1e-4)
return regr_loss
def _reg_loss(regr, gt_regr, mask):
''' L1 regression loss
Arguments:
regr (batch x max_objects x dim)
gt_regr (batch x max_objects x dim)
mask (batch x max_objects)
'''
num = mask.float().sum()
mask = mask.unsqueeze(2).expand_as(gt_regr).float()
regr = regr * mask
gt_regr = gt_regr * mask
    regr_loss = nn.functional.smooth_l1_loss(regr, gt_regr, reduction='sum')
regr_loss = regr_loss / (num + 1e-4)
return regr_loss
class FocalLoss(nn.Module):
    '''nn.Module wrapper for focal loss'''
def __init__(self):
super(FocalLoss, self).__init__()
self.neg_loss = _neg_loss
def forward(self, out, target, logits):
return self.neg_loss(out, target, logits)
class RegLoss(nn.Module):
'''Regression loss for an output tensor
Arguments:
output (batch x dim x h x w)
mask (batch x max_objects)
ind (batch x max_objects)
target (batch x max_objects x dim)
'''
def __init__(self):
super(RegLoss, self).__init__()
def forward(self, output, mask, ind, target):
pred = _transpose_and_gather_feat(output, ind)
loss = _reg_loss(pred, target, mask)
return loss
class RegL1Loss(nn.Module):
def __init__(self):
super(RegL1Loss, self).__init__()
def forward(self, output, mask, ind, target):
pred = _transpose_and_gather_feat(output, ind)
mask = mask.unsqueeze(2).expand_as(pred).float()
        # loss = F.l1_loss(pred * mask, target * mask, reduction='mean')
        loss = F.l1_loss(pred * mask, target * mask, reduction='sum')
        loss = loss / (mask.sum() + 1e-4)
return loss
class NormRegL1Loss(nn.Module):
def __init__(self):
super(NormRegL1Loss, self).__init__()
def forward(self, output, mask, ind, target):
pred = _transpose_and_gather_feat(output, ind)
mask = mask.unsqueeze(2).expand_as(pred).float()
        # loss = F.l1_loss(pred * mask, target * mask, reduction='mean')
        pred = pred / (target + 1e-4)
        target = target * 0 + 1
        loss = F.l1_loss(pred * mask, target * mask, reduction='sum')
        loss = loss / (mask.sum() + 1e-4)
return loss
class RegWeightedL1Loss(nn.Module):
def __init__(self):
super(RegWeightedL1Loss, self).__init__()
def forward(self, output, mask, ind, target):
pred = _transpose_and_gather_feat(output, ind)
mask = mask.float()
        # loss = F.l1_loss(pred * mask, target * mask, reduction='mean')
        loss = F.l1_loss(pred * mask, target * mask, reduction='sum')
        loss = loss / (mask.sum() + 1e-4)
return loss
class L1Loss(nn.Module):
def __init__(self):
super(L1Loss, self).__init__()
def forward(self, output, mask, ind, target):
pred = _transpose_and_gather_feat(output, ind)
mask = mask.unsqueeze(2).expand_as(pred).float()
        loss = F.l1_loss(pred * mask, target * mask, reduction='mean')
return loss
class BinRotLoss(nn.Module):
def __init__(self):
super(BinRotLoss, self).__init__()
def forward(self, output, mask, ind, rotbin, rotres):
pred = _transpose_and_gather_feat(output, ind)
loss = compute_rot_loss(pred, rotbin, rotres, mask)
return loss
def compute_res_loss(output, target):
    return F.smooth_l1_loss(output, target, reduction='mean')
# TODO: weight
def compute_bin_loss(output, target, mask):
mask = mask.expand_as(output)
output = output * mask.float()
    return F.cross_entropy(output, target, reduction='mean')
def compute_rot_loss(output, target_bin, target_res, mask):
# output: (B, 128, 8) [bin1_cls[0], bin1_cls[1], bin1_sin, bin1_cos,
# bin2_cls[0], bin2_cls[1], bin2_sin, bin2_cos]
# target_bin: (B, 128, 2) [bin1_cls, bin2_cls]
# target_res: (B, 128, 2) [bin1_res, bin2_res]
# mask: (B, 128, 1)
# import pdb; pdb.set_trace()
output = output.view(-1, 8)
target_bin = target_bin.view(-1, 2)
target_res = target_res.view(-1, 2)
mask = mask.view(-1, 1)
loss_bin1 = compute_bin_loss(output[:, 0:2], target_bin[:, 0], mask)
loss_bin2 = compute_bin_loss(output[:, 4:6], target_bin[:, 1], mask)
loss_res = torch.zeros_like(loss_bin1)
if target_bin[:, 0].nonzero().shape[0] > 0:
idx1 = target_bin[:, 0].nonzero()[:, 0]
valid_output1 = torch.index_select(output, 0, idx1.long())
valid_target_res1 = torch.index_select(target_res, 0, idx1.long())
loss_sin1 = compute_res_loss(
valid_output1[:, 2], torch.sin(valid_target_res1[:, 0]))
loss_cos1 = compute_res_loss(
valid_output1[:, 3], torch.cos(valid_target_res1[:, 0]))
loss_res += loss_sin1 + loss_cos1
if target_bin[:, 1].nonzero().shape[0] > 0:
idx2 = target_bin[:, 1].nonzero()[:, 0]
valid_output2 = torch.index_select(output, 0, idx2.long())
valid_target_res2 = torch.index_select(target_res, 0, idx2.long())
loss_sin2 = compute_res_loss(
valid_output2[:, 6], torch.sin(valid_target_res2[:, 1]))
loss_cos2 = compute_res_loss(
valid_output2[:, 7], torch.cos(valid_target_res2[:, 1]))
loss_res += loss_sin2 + loss_cos2
return loss_bin1 + loss_bin2 + loss_res
|
banmo-main
|
third_party/vcnplus/models/det_losses.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
def _sigmoid(x):
y = torch.clamp(x.sigmoid_(), min=1e-4, max=1-1e-4)
return y
def _gather_feat(feat, ind, mask=None):
dim = feat.size(2)
ind = ind.unsqueeze(2).expand(ind.size(0), ind.size(1), dim)
feat = feat.gather(1, ind)
if mask is not None:
mask = mask.unsqueeze(2).expand_as(feat)
feat = feat[mask]
feat = feat.view(-1, dim)
return feat
def _transpose_and_gather_feat(feat, ind):
feat = feat.permute(0, 2, 3, 1).contiguous()
feat = feat.view(feat.size(0), -1, feat.size(3))
feat = _gather_feat(feat, ind)
return feat
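# Added sketch (illustration only): gathers per-object feature vectors at
# flattened center indices ind = y * W + x; all values are made up.
def _demo_gather():
    feat = torch.arange(2 * 3 * 4 * 5).float().view(2, 3, 4, 5)  # (B, C, H, W)
    ind = torch.tensor([[0, 7], [19, 3]])                        # (B, K)
    out = _transpose_and_gather_feat(feat, ind)                  # -> (B, K, C)
    assert out.shape == (2, 2, 3)
    return out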
def flip_tensor(x):
return torch.flip(x, [3])
# tmp = x.detach().cpu().numpy()[..., ::-1].copy()
# return torch.from_numpy(tmp).to(x.device)
def flip_lr(x, flip_idx):
tmp = x.detach().cpu().numpy()[..., ::-1].copy()
shape = tmp.shape
for e in flip_idx:
tmp[:, e[0], ...], tmp[:, e[1], ...] = \
tmp[:, e[1], ...].copy(), tmp[:, e[0], ...].copy()
return torch.from_numpy(tmp.reshape(shape)).to(x.device)
def flip_lr_off(x, flip_idx):
tmp = x.detach().cpu().numpy()[..., ::-1].copy()
shape = tmp.shape
tmp = tmp.reshape(tmp.shape[0], 17, 2,
tmp.shape[2], tmp.shape[3])
tmp[:, :, 0, :, :] *= -1
for e in flip_idx:
tmp[:, e[0], ...], tmp[:, e[1], ...] = \
tmp[:, e[1], ...].copy(), tmp[:, e[0], ...].copy()
return torch.from_numpy(tmp.reshape(shape)).to(x.device)
|
banmo-main
|
third_party/vcnplus/models/det_utils.py
|
banmo-main
|
third_party/vcnplus/models/__init__.py
|
|
"""
Copyright (C) 2019 NVIDIA Corporation. All rights reserved.
Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode).
This file incorporates work covered by the following copyright and permission notice:
Copyright (c) 2018 Ignacio Rocco
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Source: https://github.com/ignacio-rocco/weakalign/blob/master/model/cnn_geometric_model.py
"""
import torch
import torch.nn as nn
from torchvision import models
def featureL2Norm(feature):
epsilon = 1e-6
norm = torch.pow(torch.sum(torch.pow(feature, 2), 1) +
epsilon, 0.5).unsqueeze(1).expand_as(feature)
return torch.div(feature, norm)
class FeatureExtraction(torch.nn.Module):
def __init__(self, train_fe=False, feature_extraction_cnn='vgg19', normalization=True, last_layer='', use_cuda=True):
super(FeatureExtraction, self).__init__()
self.normalization = normalization
# multiple extracting layers
last_layer = last_layer.split(',')
if feature_extraction_cnn == 'vgg16':
self.model = models.vgg16(pretrained=True)
# keep feature extraction network up to indicated layer
vgg_feature_layers = ['conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1', 'conv2_1',
'relu2_1', 'conv2_2', 'relu2_2', 'pool2', 'conv3_1', 'relu3_1',
'conv3_2', 'relu3_2', 'conv3_3', 'relu3_3', 'pool3', 'conv4_1',
'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3', 'relu4_3', 'pool4',
'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3', 'relu5_3', 'pool5']
start_index = 0
self.model_list = []
for l in last_layer:
if l == '':
l = 'pool4'
layer_idx = vgg_feature_layers.index(l)
assert layer_idx >= start_index, 'layer order wrong!'
model = nn.Sequential(
*list(self.model.features.children())[start_index:layer_idx + 1])
self.model_list.append(model)
start_index = layer_idx + 1
if feature_extraction_cnn == 'vgg19':
self.model = models.vgg19(pretrained=True)
# keep feature extraction network up to indicated layer
vgg_feature_layers = ['conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3', 'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3', 'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3', 'relu5_3', 'conv5_4', 'relu5_4', 'pool5']
vgg_output_dim = [64, 64, 64, 64, 64,
128, 128, 128, 128, 128,
256, 256, 256, 256, 256, 256, 256, 256, 256,
512, 512, 512, 512, 512, 512, 512, 512, 512,
512, 512, 512, 512, 512, 512, 512, 512, 512]
start_index = 0
self.model_list = []
self.out_dim = 0
for l in last_layer:
if l == '':
l = 'relu5_4'
layer_idx = vgg_feature_layers.index(l)
assert layer_idx >= start_index, 'layer order wrong!'
self.out_dim += vgg_output_dim[layer_idx]
model = nn.Sequential(
*list(self.model.features.children())[start_index:layer_idx + 1])
self.model_list.append(model)
start_index = layer_idx + 1
if feature_extraction_cnn == 'resnet101':
self.model = models.resnet101(pretrained=True)
resnet_feature_layers = ['conv1',
'bn1',
'relu',
'maxpool',
'layer1',
'layer2',
'layer3',
'layer4']
            # last_layer was split into a list above, so index its first entry
            l = last_layer[0] if last_layer[0] != '' else 'layer3'
            last_layer_idx = resnet_feature_layers.index(l)
resnet_module_list = [self.model.conv1,
self.model.bn1,
self.model.relu,
self.model.maxpool,
self.model.layer1,
self.model.layer2,
self.model.layer3,
self.model.layer4]
self.model = nn.Sequential(
*resnet_module_list[:last_layer_idx + 1])
if feature_extraction_cnn == 'resnet101_v2':
self.model = models.resnet101(pretrained=True)
# keep feature extraction network up to pool4 (last layer - 7)
self.model = nn.Sequential(*list(self.model.children())[:-3])
if feature_extraction_cnn == 'densenet201':
self.model = models.densenet201(pretrained=True)
# keep feature extraction network up to transitionlayer2
self.model = nn.Sequential(
*list(self.model.features.children())[:-4])
if not train_fe:
# freeze parameters
for param in self.model.parameters():
param.requires_grad = False
# move to GPU
if use_cuda:
self.model_list = [model.cuda() for model in self.model_list]
def forward(self, image_batch):
features_list = []
features = image_batch
for model in self.model_list:
features = model(features)
if self.normalization:
features = featureL2Norm(features)
features_list.append(features)
return features_list
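# Added usage sketch (illustration only): extracts two VGG-19 feature maps on
# the CPU; torchvision downloads the pretrained weights on first use.
def _demo_feature_extraction():
    fe = FeatureExtraction(feature_extraction_cnn='vgg19',
                           last_layer='relu3_4,relu4_4', use_cuda=False)
    feats = fe(torch.randn(1, 3, 224, 224))
    return [f.shape for f in feats]          # a 256- and a 512-channel map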
|
banmo-main
|
third_party/vcnplus/models/feature_extraction.py
|
from __future__ import print_function
import torch
import torch.nn as nn
import torch.utils.data
from torch.autograd import Variable
import torch.nn.functional as F
import math
import numpy as np
import pdb
#import kornia
class residualBlock(nn.Module):
expansion = 1
def __init__(self, in_channels, n_filters, stride=1, downsample=None,dilation=1,with_bn=True):
super(residualBlock, self).__init__()
if dilation > 1:
padding = dilation
else:
padding = 1
if with_bn:
self.convbnrelu1 = conv2DBatchNormRelu(in_channels, n_filters, 3, stride, padding, dilation=dilation)
self.convbn2 = conv2DBatchNorm(n_filters, n_filters, 3, 1, 1)
else:
self.convbnrelu1 = conv2DBatchNormRelu(in_channels, n_filters, 3, stride, padding, dilation=dilation,with_bn=False)
self.convbn2 = conv2DBatchNorm(n_filters, n_filters, 3, 1, 1, with_bn=False)
self.downsample = downsample
self.relu = nn.LeakyReLU(0.1, inplace=True)
def forward(self, x):
residual = x
out = self.convbnrelu1(x)
out = self.convbn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
return self.relu(out)
def conv(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1):
return nn.Sequential(
nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride,
padding=padding, dilation=dilation, bias=True),
nn.BatchNorm2d(out_planes),
nn.LeakyReLU(0.1,inplace=True))
class conv2DBatchNorm(nn.Module):
def __init__(self, in_channels, n_filters, k_size, stride, padding, dilation=1, with_bn=True):
super(conv2DBatchNorm, self).__init__()
bias = not with_bn
if dilation > 1:
conv_mod = nn.Conv2d(int(in_channels), int(n_filters), kernel_size=k_size,
padding=padding, stride=stride, bias=bias, dilation=dilation)
else:
conv_mod = nn.Conv2d(int(in_channels), int(n_filters), kernel_size=k_size,
padding=padding, stride=stride, bias=bias, dilation=1)
if with_bn:
self.cb_unit = nn.Sequential(conv_mod,
nn.BatchNorm2d(int(n_filters)),)
else:
self.cb_unit = nn.Sequential(conv_mod,)
def forward(self, inputs):
outputs = self.cb_unit(inputs)
return outputs
class conv2DBatchNormRelu(nn.Module):
def __init__(self, in_channels, n_filters, k_size, stride, padding, dilation=1, with_bn=True):
super(conv2DBatchNormRelu, self).__init__()
bias = not with_bn
if dilation > 1:
conv_mod = nn.Conv2d(int(in_channels), int(n_filters), kernel_size=k_size,
padding=padding, stride=stride, bias=bias, dilation=dilation)
else:
conv_mod = nn.Conv2d(int(in_channels), int(n_filters), kernel_size=k_size,
padding=padding, stride=stride, bias=bias, dilation=1)
if with_bn:
self.cbr_unit = nn.Sequential(conv_mod,
nn.BatchNorm2d(int(n_filters)),
nn.LeakyReLU(0.1, inplace=True),)
else:
self.cbr_unit = nn.Sequential(conv_mod,
nn.LeakyReLU(0.1, inplace=True),)
def forward(self, inputs):
outputs = self.cbr_unit(inputs)
return outputs
class pyramidPooling(nn.Module):
def __init__(self, in_channels, with_bn=True, levels=4):
super(pyramidPooling, self).__init__()
self.levels = levels
self.paths = []
for i in range(levels):
self.paths.append(conv2DBatchNormRelu(in_channels, in_channels, 1, 1, 0, with_bn=with_bn))
self.path_module_list = nn.ModuleList(self.paths)
self.relu = nn.LeakyReLU(0.1, inplace=True)
def forward(self, x):
h, w = x.shape[2:]
k_sizes = []
strides = []
for pool_size in np.linspace(1,min(h,w)//2,self.levels,dtype=int):
k_sizes.append((int(h/pool_size), int(w/pool_size)))
strides.append((int(h/pool_size), int(w/pool_size)))
k_sizes = k_sizes[::-1]
strides = strides[::-1]
pp_sum = x
for i, module in enumerate(self.path_module_list):
out = F.avg_pool2d(x, k_sizes[i], stride=strides[i], padding=0)
out = module(out)
out = F.upsample(out, size=(h,w), mode='bilinear')
pp_sum = pp_sum + 1./self.levels*out
pp_sum = self.relu(pp_sum/2.)
return pp_sum
class pspnet(nn.Module):
"""
Modified PSPNet. https://github.com/meetshah1995/pytorch-semseg/blob/master/ptsemseg/models/pspnet.py
"""
def __init__(self, is_proj=True,groups=1):
super(pspnet, self).__init__()
self.inplanes = 32
self.is_proj = is_proj
# Encoder
self.convbnrelu1_1 = conv2DBatchNormRelu(in_channels=3, k_size=3, n_filters=16,
padding=1, stride=2)
self.convbnrelu1_2 = conv2DBatchNormRelu(in_channels=16, k_size=3, n_filters=16,
padding=1, stride=1)
self.convbnrelu1_3 = conv2DBatchNormRelu(in_channels=16, k_size=3, n_filters=32,
padding=1, stride=1)
# Vanilla Residual Blocks
self.res_block3 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block5 = self._make_layer(residualBlock,128,1,stride=2)
self.res_block6 = self._make_layer(residualBlock,128,1,stride=2)
self.res_block7 = self._make_layer(residualBlock,128,1,stride=2)
self.pyramid_pooling = pyramidPooling(128, levels=3)
# Iconvs
self.upconv6 = nn.Sequential(nn.Upsample(scale_factor=2),
conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1))
self.iconv5 = conv2DBatchNormRelu(in_channels=192, k_size=3, n_filters=128,
padding=1, stride=1)
self.upconv5 = nn.Sequential(nn.Upsample(scale_factor=2),
conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1))
self.iconv4 = conv2DBatchNormRelu(in_channels=192, k_size=3, n_filters=128,
padding=1, stride=1)
self.upconv4 = nn.Sequential(nn.Upsample(scale_factor=2),
conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1))
self.iconv3 = conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1)
self.upconv3 = nn.Sequential(nn.Upsample(scale_factor=2),
conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1))
self.iconv2 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=64,
padding=1, stride=1)
if self.is_proj:
self.proj6 = conv2DBatchNormRelu(in_channels=128,k_size=1,n_filters=128//groups, padding=0,stride=1)
self.proj5 = conv2DBatchNormRelu(in_channels=128,k_size=1,n_filters=128//groups, padding=0,stride=1)
self.proj4 = conv2DBatchNormRelu(in_channels=128,k_size=1,n_filters=128//groups, padding=0,stride=1)
self.proj3 = conv2DBatchNormRelu(in_channels=64, k_size=1,n_filters=64//groups, padding=0,stride=1)
self.proj2 = conv2DBatchNormRelu(in_channels=64, k_size=1,n_filters=64//groups, padding=0,stride=1)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if hasattr(m.bias,'data'):
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
# H, W -> H/2, W/2
conv1 = self.convbnrelu1_1(x)
conv1 = self.convbnrelu1_2(conv1)
conv1 = self.convbnrelu1_3(conv1)
## H/2, W/2 -> H/4, W/4
pool1 = F.max_pool2d(conv1, 3, 2, 1)
# H/4, W/4 -> H/16, W/16
rconv3 = self.res_block3(pool1)
conv4 = self.res_block5(rconv3)
conv5 = self.res_block6(conv4)
conv6 = self.res_block7(conv5)
conv6 = self.pyramid_pooling(conv6)
conv6x = F.upsample(conv6, [conv5.size()[2],conv5.size()[3]],mode='bilinear')
concat5 = torch.cat((conv5,self.upconv6[1](conv6x)),dim=1)
conv5 = self.iconv5(concat5)
conv5x = F.upsample(conv5, [conv4.size()[2],conv4.size()[3]],mode='bilinear')
concat4 = torch.cat((conv4,self.upconv5[1](conv5x)),dim=1)
conv4 = self.iconv4(concat4)
conv4x = F.upsample(conv4, [rconv3.size()[2],rconv3.size()[3]],mode='bilinear')
concat3 = torch.cat((rconv3,self.upconv4[1](conv4x)),dim=1)
conv3 = self.iconv3(concat3)
conv3x = F.upsample(conv3, [pool1.size()[2],pool1.size()[3]],mode='bilinear')
concat2 = torch.cat((pool1,self.upconv3[1](conv3x)),dim=1)
conv2 = self.iconv2(concat2)
if self.is_proj:
proj6 = self.proj6(conv6)
proj5 = self.proj5(conv5)
proj4 = self.proj4(conv4)
proj3 = self.proj3(conv3)
proj2 = self.proj2(conv2)
return proj6,proj5,proj4,proj3,proj2
else:
return conv6, conv5, conv4, conv3, conv2
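# Added sketch (illustration only): the encoder-decoder returns a five-level
# feature pyramid at strides 64, 32, 16, 8 and 4 relative to the input.
def _demo_pspnet():
    net = pspnet(is_proj=True).eval()
    with torch.no_grad():
        outs = net(torch.randn(1, 3, 256, 256))
    return [o.shape for o in outs]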
class pspnet_s(nn.Module):
"""
Modified PSPNet. https://github.com/meetshah1995/pytorch-semseg/blob/master/ptsemseg/models/pspnet.py
"""
def __init__(self, is_proj=True,groups=1):
super(pspnet_s, self).__init__()
self.inplanes = 32
self.is_proj = is_proj
# Encoder
self.convbnrelu1_1 = conv2DBatchNormRelu(in_channels=3, k_size=3, n_filters=16,
padding=1, stride=2)
self.convbnrelu1_2 = conv2DBatchNormRelu(in_channels=16, k_size=3, n_filters=16,
padding=1, stride=1)
self.convbnrelu1_3 = conv2DBatchNormRelu(in_channels=16, k_size=3, n_filters=32,
padding=1, stride=1)
# Vanilla Residual Blocks
self.res_block3 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block5 = self._make_layer(residualBlock,128,1,stride=2)
self.res_block6 = self._make_layer(residualBlock,128,1,stride=2)
self.res_block7 = self._make_layer(residualBlock,128,1,stride=2)
self.pyramid_pooling = pyramidPooling(128, levels=3)
# Iconvs
self.upconv6 = nn.Sequential(nn.Upsample(scale_factor=2),
conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1))
self.iconv5 = conv2DBatchNormRelu(in_channels=192, k_size=3, n_filters=128,
padding=1, stride=1)
self.upconv5 = nn.Sequential(nn.Upsample(scale_factor=2),
conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1))
self.iconv4 = conv2DBatchNormRelu(in_channels=192, k_size=3, n_filters=128,
padding=1, stride=1)
self.upconv4 = nn.Sequential(nn.Upsample(scale_factor=2),
conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1))
self.iconv3 = conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1)
#self.upconv3 = nn.Sequential(nn.Upsample(scale_factor=2),
# conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
# padding=1, stride=1))
#self.iconv2 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=64,
# padding=1, stride=1)
if self.is_proj:
self.proj6 = conv2DBatchNormRelu(in_channels=128,k_size=1,n_filters=128//groups, padding=0,stride=1)
self.proj5 = conv2DBatchNormRelu(in_channels=128,k_size=1,n_filters=128//groups, padding=0,stride=1)
self.proj4 = conv2DBatchNormRelu(in_channels=128,k_size=1,n_filters=128//groups, padding=0,stride=1)
self.proj3 = conv2DBatchNormRelu(in_channels=64, k_size=1,n_filters=64//groups, padding=0,stride=1)
#self.proj2 = conv2DBatchNormRelu(in_channels=64, k_size=1,n_filters=64//groups, padding=0,stride=1)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if hasattr(m.bias,'data'):
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
# H, W -> H/2, W/2
conv1 = self.convbnrelu1_1(x)
conv1 = self.convbnrelu1_2(conv1)
conv1 = self.convbnrelu1_3(conv1)
## H/2, W/2 -> H/4, W/4
pool1 = F.max_pool2d(conv1, 3, 2, 1)
# H/4, W/4 -> H/16, W/16
rconv3 = self.res_block3(pool1)
conv4 = self.res_block5(rconv3)
conv5 = self.res_block6(conv4)
conv6 = self.res_block7(conv5)
conv6 = self.pyramid_pooling(conv6)
conv6x = F.upsample(conv6, [conv5.size()[2],conv5.size()[3]],mode='bilinear')
concat5 = torch.cat((conv5,self.upconv6[1](conv6x)),dim=1)
conv5 = self.iconv5(concat5)
conv5x = F.upsample(conv5, [conv4.size()[2],conv4.size()[3]],mode='bilinear')
concat4 = torch.cat((conv4,self.upconv5[1](conv5x)),dim=1)
conv4 = self.iconv4(concat4)
conv4x = F.upsample(conv4, [rconv3.size()[2],rconv3.size()[3]],mode='bilinear')
concat3 = torch.cat((rconv3,self.upconv4[1](conv4x)),dim=1)
conv3 = self.iconv3(concat3)
#conv3x = F.upsample(conv3, [pool1.size()[2],pool1.size()[3]],mode='bilinear')
#concat2 = torch.cat((pool1,self.upconv3[1](conv3x)),dim=1)
#conv2 = self.iconv2(concat2)
if self.is_proj:
proj6 = self.proj6(conv6)
proj5 = self.proj5(conv5)
proj4 = self.proj4(conv4)
proj3 = self.proj3(conv3)
# proj2 = self.proj2(conv2)
# return proj6,proj5,proj4,proj3,proj2
return proj6,proj5,proj4,proj3
else:
# return conv6, conv5, conv4, conv3, conv2
return conv6, conv5, conv4, conv3
class bfmodule(nn.Module):
def __init__(self, inplanes, outplanes):
super(bfmodule, self).__init__()
self.proj = conv2DBatchNormRelu(in_channels=inplanes,k_size=1,n_filters=64,padding=0,stride=1)
self.inplanes = 64
# Vanilla Residual Blocks
self.res_block3 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block5 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block6 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block7 = self._make_layer(residualBlock,128,1,stride=2)
self.pyramid_pooling = pyramidPooling(128, levels=3)
# Iconvs
self.upconv6 = conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1)
self.upconv5 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1)
self.upconv4 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1)
self.upconv3 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1)
self.iconv5 = conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1)
self.iconv4 = conv2DBatchNormRelu(in_channels=96, k_size=3, n_filters=64,
padding=1, stride=1)
self.iconv3 = conv2DBatchNormRelu(in_channels=96, k_size=3, n_filters=64,
padding=1, stride=1)
self.iconv2 = nn.Sequential(conv2DBatchNormRelu(in_channels=96, k_size=3, n_filters=64,
padding=1, stride=1),
nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True))
self.proj6 = nn.Conv2d(128, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
self.proj5 = nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
self.proj4 = nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
self.proj3 = nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if hasattr(m.bias,'data'):
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
proj = self.proj(x) # 4x
rconv3 = self.res_block3(proj) #8x
conv4 = self.res_block5(rconv3) #16x
conv5 = self.res_block6(conv4) #32x
conv6 = self.res_block7(conv5) #64x
conv6 = self.pyramid_pooling(conv6) #64x
pred6 = self.proj6(conv6)
conv6u = F.upsample(conv6, [conv5.size()[2],conv5.size()[3]], mode='bilinear')
concat5 = torch.cat((conv5,self.upconv6(conv6u)),dim=1)
conv5 = self.iconv5(concat5) #32x
pred5 = self.proj5(conv5)
conv5u = F.upsample(conv5, [conv4.size()[2],conv4.size()[3]], mode='bilinear')
concat4 = torch.cat((conv4,self.upconv5(conv5u)),dim=1)
conv4 = self.iconv4(concat4) #16x
pred4 = self.proj4(conv4)
conv4u = F.upsample(conv4, [rconv3.size()[2],rconv3.size()[3]], mode='bilinear')
concat3 = torch.cat((rconv3,self.upconv4(conv4u)),dim=1)
conv3 = self.iconv3(concat3) # 8x
pred3 = self.proj3(conv3)
conv3u = F.upsample(conv3, [x.size()[2],x.size()[3]], mode='bilinear')
concat2 = torch.cat((proj,self.upconv3(conv3u)),dim=1)
pred2 = self.iconv2(concat2) # 4x
return pred2, pred3, pred4, pred5, pred6
class bfmodule_feat(nn.Module):
def __init__(self, inplanes, outplanes):
super(bfmodule_feat, self).__init__()
self.proj = conv2DBatchNormRelu(in_channels=inplanes,k_size=1,n_filters=64,padding=0,stride=1)
self.inplanes = 64
# Vanilla Residual Blocks
self.res_block3 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block5 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block6 = self._make_layer(residualBlock,64,1,stride=2)
self.res_block7 = self._make_layer(residualBlock,128,1,stride=2)
self.pyramid_pooling = pyramidPooling(128, levels=3)
# Iconvs
self.upconv6 = conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1)
self.upconv5 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1)
self.upconv4 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1)
self.upconv3 = conv2DBatchNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1)
self.iconv5 = conv2DBatchNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1)
self.iconv4 = conv2DBatchNormRelu(in_channels=96, k_size=3, n_filters=64,
padding=1, stride=1)
self.iconv3 = conv2DBatchNormRelu(in_channels=96, k_size=3, n_filters=64,
padding=1, stride=1)
self.iconv2 = conv2DBatchNormRelu(in_channels=96, k_size=3, n_filters=64,
padding=1, stride=1)
self.proj6 = nn.Conv2d(128, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
self.proj5 = nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
self.proj4 = nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
self.proj3 = nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
self.proj2 = nn.Conv2d(64, outplanes,kernel_size=3, stride=1, padding=1, bias=True)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if hasattr(m.bias,'data'):
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
proj = self.proj(x) # 4x
rconv3 = self.res_block3(proj) #8x
conv4 = self.res_block5(rconv3) #16x
conv5 = self.res_block6(conv4) #32x
conv6 = self.res_block7(conv5) #64x
conv6 = self.pyramid_pooling(conv6) #64x
pred6 = self.proj6(conv6)
conv6u = F.upsample(conv6, [conv5.size()[2],conv5.size()[3]], mode='bilinear')
concat5 = torch.cat((conv5,self.upconv6(conv6u)),dim=1)
conv5 = self.iconv5(concat5) #32x
pred5 = self.proj5(conv5)
conv5u = F.upsample(conv5, [conv4.size()[2],conv4.size()[3]], mode='bilinear')
concat4 = torch.cat((conv4,self.upconv5(conv5u)),dim=1)
conv4 = self.iconv4(concat4) #16x
pred4 = self.proj4(conv4)
conv4u = F.upsample(conv4, [rconv3.size()[2],rconv3.size()[3]], mode='bilinear')
concat3 = torch.cat((rconv3,self.upconv4(conv4u)),dim=1)
conv3 = self.iconv3(concat3) # 8x
pred3 = self.proj3(conv3)
conv3u = F.upsample(conv3, [x.size()[2],x.size()[3]], mode='bilinear')
concat2 = torch.cat((proj,self.upconv3(conv3u)),dim=1)
conv2 = self.iconv2(concat2) # 4x
pred2 = self.proj2(conv2) # 4x
return pred2, conv2
def compute_geo_costs(rot, trans, Ex, Kinv, hp0, hp1, tau, Kinv_n=None):
if Kinv_n is None: Kinv_n = Kinv
R01 = kornia.angle_axis_to_rotation_matrix(rot)
H01 = Kinv.inverse().matmul(R01).matmul(Kinv_n)
comp_hp1 = H01.matmul(hp1.permute(0,2,1))
foe = (comp_hp1-tau*hp0.permute(0,2,1))
parallax3d = Kinv.matmul(foe)
p3dmag = parallax3d.norm(2,1)[:,np.newaxis]
parallax2d = (comp_hp1/comp_hp1[:,-1:]-hp0.permute(0,2,1))[:,:2]
p2dmag = parallax2d.norm(2,1)[:,np.newaxis]
p2dnorm = parallax2d / (1e-9+p2dmag)
foe_cam = Kinv.inverse().matmul(trans[:,:,np.newaxis])
foe_cam = foe_cam[:,:2] / (1e-9+foe_cam[:,-1:])
direct = foe_cam -hp0.permute(0,2,1)[:,:2]
directn = direct / (1e-9+direct.norm(2,1)[:,np.newaxis])
# metrics: 0) R-homography+symterr; 1) sampson 2) 2D angular 3) 3D sampson 4) 3D angular
##TODO validate
comp_hp0 = H01.inverse().matmul(hp0.permute(0,2,1))
mcost00 = parallax2d.norm(2,1)
mcost01 = (comp_hp0/comp_hp0[:,-1:] - hp1.permute(0,2,1))[:,:2].norm(2,1)
mcost1 = sampson_err(Kinv.matmul(hp0.permute(0,2,1)),
Kinv_n.matmul(hp1.permute(0,2,1)),Ex.cuda().permute(0,2,1)) # variable K
mcost2 = -(trans[:,-1:,np.newaxis]).sign()*(directn*p2dnorm).sum(1,keepdims=True)
mcost4 = -(trans[:,:,np.newaxis]*parallax3d).sum(1,keepdims=True)/(p3dmag+1e-9)
mcost3 = torch.clamp(1-mcost4.pow(2),0,1).sqrt()*p3dmag*mcost4.sign()
mcost10 = torch.clamp(1-mcost2.pow(2),0,1).sqrt()*p2dmag*mcost2.sign()
return mcost00, mcost01, mcost1, mcost2, mcost3, mcost4, p3dmag, mcost10
def get_skew_mat(transx,rotx):
rot = kornia.angle_axis_to_rotation_matrix(rotx)
trans = -rot.permute(0,2,1).matmul(transx[:,:,np.newaxis])[:,:,0]
rot = rot.permute(0,2,1)
tx = torch.zeros(transx.shape[0],3,3)
tx[:,0,1] = -transx[:,2]
tx[:,0,2] = transx[:,1]
tx[:,1,0] = transx[:,2]
tx[:,1,2] = -transx[:,0]
tx[:,2,0] = -transx[:,1]
tx[:,2,1] = transx[:,0]
return rot.matmul(tx)
def sampson_err(x1h, x2h, F):
l2 = F.permute(0,2,1).matmul(x1h)
l1 = F.matmul(x2h)
algdis = (l1 * x1h).sum(1)
dis = algdis**2 / (1e-9+l1[:,0]**2+l1[:,1]**2+l2[:,0]**2+l2[:,1]**2)
return dis
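# Added numerical check (illustration only): for a pure sideways translation
# t = [1, 0, 0], the essential matrix is the skew of t, and a match on the
# same scanline satisfies the epipolar constraint, so the error is ~0.
def _demo_sampson_err():
    E = torch.tensor([[[0., 0., 0.], [0., 0., -1.], [0., 1., 0.]]])
    x1 = torch.tensor([[[0.2, 0.5, 1.0]]]).permute(0, 2, 1)  # (1, 3, 1)
    x2 = torch.tensor([[[0.4, 0.5, 1.0]]]).permute(0, 2, 1)  # same y, shifted x
    assert sampson_err(x1, x2, E).abs().max() < 1e-9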
def get_intrinsics(intr, noise=False):
f = intr[0].float()
cx = intr[1].float()
cy = intr[2].float()
bs = f.shape[0]
delta = 1e-4
if noise:
fo = f.clone()
cxo = cx.clone()
cyo = cy.clone()
f = torch.Tensor(np.random.normal(loc=0., scale=delta,size=(bs,))).cuda().exp() * fo
cx = torch.Tensor(np.random.normal(loc=0.,scale=delta,size=(bs,))).cuda().exp() * cxo
cy = torch.Tensor(np.random.normal(loc=0.,scale=delta,size=(bs,))).cuda().exp() * cyo
Kinv = torch.Tensor(np.eye(3)[np.newaxis]).cuda().repeat(bs,1,1)
Kinv[:,2,2] *= f
Kinv[:,0,2] -= cx
Kinv[:,1,2] -= cy
Kinv /= f[:,np.newaxis,np.newaxis] #4,3,3
Taug = torch.cat(intr[4:10],-1).view(-1,bs).T # 4,6
Taug = torch.cat((Taug.view(bs,3,2).permute(0,2,1),Kinv[:,2:3]),1)
Kinv = Kinv.matmul(Taug)
if len(intr)>12:
Kinv_n = torch.Tensor(np.eye(3)[np.newaxis]).cuda().repeat(bs,1,1)
fn = intr[12].float()
Kinv_n[:,2,2] *= fn
Kinv_n[:,0,2] -= cx
Kinv_n[:,1,2] -= cy
Kinv_n /= fn[:,np.newaxis,np.newaxis] #4,3,3
elif noise:
f = torch.Tensor(np.random.normal(loc=0., scale=delta,size=(bs,))).cuda().exp() * fo
cx = torch.Tensor(np.random.normal(loc=0.,scale=delta,size=(bs,))).cuda().exp() * cxo
cy = torch.Tensor(np.random.normal(loc=0.,scale=delta,size=(bs,))).cuda().exp() * cyo
Kinv_n = torch.Tensor(np.eye(3)[np.newaxis]).cuda().repeat(bs,1,1)
Kinv_n[:,2,2] *= f
Kinv_n[:,0,2] -= cx
Kinv_n[:,1,2] -= cy
Kinv_n /= f[:,np.newaxis,np.newaxis] #4,3,3
Taug = torch.cat(intr[4:10],-1).view(-1,bs).T # 4,6
Taug = torch.cat((Taug.view(bs,3,2).permute(0,2,1),Kinv_n[:,2:3]),1)
Kinv_n = Kinv_n.matmul(Taug)
else:
Kinv_n = Kinv
return Kinv, Kinv_n
def F_ngransac(hp0,hp1,Ks,rand, unc_occ, iters=1000,cv=False,Kn=None):
cv=True
if Kn is None:
Kn = Ks
import cv2
b = hp1.shape[0]
hp0_cpu = np.asarray(hp0.cpu())
hp1_cpu = np.asarray(hp1.cpu())
if not rand:
## TODO
fmask = np.ones(hp0.shape[1]).astype(bool)
rand_seed = 0
else:
fmask = np.random.choice([True, False], size=hp0.shape[1], p=[0.1,0.9])
        rand_seed = np.random.randint(0,1000) # random seed to be used in C++
### TODO
hp0 = Ks.inverse().matmul(hp0.permute(0,2,1)).permute(0,2,1)
hp1 = Kn.inverse().matmul(hp1.permute(0,2,1)).permute(0,2,1)
ratios = torch.zeros(hp0[:1,:,:1].shape)
probs = torch.Tensor(np.ones(fmask.sum()))/fmask.sum()
probs = probs[np.newaxis,:,np.newaxis]
#probs = torch.Tensor(np.zeros(fmask.sum()))
##unc_occ = unc_occ<0; probs[unc_occ[0]] = 1./unc_occ.float().sum()
#probs = F.softmax(-0.1*unc_occ[0],-1).cpu()
#probs = probs[np.newaxis,:,np.newaxis]
Es = torch.zeros((b, 3,3)).float() # estimated model
rot = torch.zeros((b, 3)).float() # estimated model
trans = torch.zeros((b, 3)).float() # estimated model
out_model = torch.zeros((3, 3)).float() # estimated model
out_inliers = torch.zeros(probs.size()) # inlier mask of estimated model
out_gradients = torch.zeros(probs.size()) # gradient tensor (only used during training)
for i in range(b):
pts1 = hp0[i:i+1, fmask,:2].cpu()
pts2 = hp1[i:i+1, fmask,:2].cpu()
# create data tensor of feature coordinates and matching ratios
correspondences = torch.cat((pts1, pts2, ratios), axis=2)
correspondences = correspondences.permute(2,1,0)
#incount = ngransac.find_fundamental_mat(correspondences, probs, rand_seed, 1000, 0.1, True, out_model, out_inliers, out_gradients)
#E = K1.T.dot(out_model).dot(K0)
if cv==True:
            E, ffmask = cv2.findEssentialMat(np.asarray(pts1[0]), np.asarray(pts2[0]),
                                             np.eye(3), cv2.RANSAC, threshold=0.0001)
ffmask = ffmask[:,0]
Es[i]=torch.Tensor(E)
else:
import ngransac
incount = ngransac.find_essential_mat(correspondences, probs, rand_seed, iters, 0.0001, out_model, out_inliers, out_gradients)
Es[i]=out_model
E = np.asarray(out_model)
maskk = np.asarray(out_inliers[0,:,0])
ffmask = fmask.copy()
ffmask[fmask] = maskk
K1 = np.asarray(Kn[i].cpu())
K0 = np.asarray(Ks[i].cpu())
R1, R2, T = cv2.decomposeEssentialMat(E)
for rott in [(R1,T),(R2,T),(R1,-T),(R2,-T)]:
if testEss(K0,K1,rott[0],rott[1],hp0_cpu[0,ffmask].T, hp1_cpu[i,ffmask].T):
#if testEss(K0,K1,rott[0],rott[1],hp0_cpu[0,ffmask].T[:,ffmask.sum()//10::ffmask.sum()//10], hp1_cpu[i,ffmask].T[:,ffmask.sum()//10::ffmask.sum()//10]):
R01=rott[0].T
t10=-R01.dot(rott[1][:,0])
    if 't10' not in locals():
t10 = np.asarray([0,0,1])
R01 = np.eye(3)
rot[i] = torch.Tensor(cv2.Rodrigues(R01)[0][:,0]).cuda()
trans[i] = torch.Tensor(t10).cuda()
return rot, trans, Es
def testEss(K0,K1,R,T,p1,p2):
import cv2
testP = cv2.triangulatePoints(K0.dot(np.concatenate( (np.eye(3),np.zeros((3,1))), -1)),
K1.dot(np.concatenate( (R,T), -1)),
p1[:2],p2[:2])
Z1 = testP[2,:]/testP[-1,:]
Z2 = (R.dot(Z1*np.linalg.inv(K0).dot(p1))+T)[-1,:]
if ((Z1>0).sum() > (Z1<=0).sum()) and ((Z2>0).sum() > (Z2<=0).sum()):
#print(Z1)
#print(Z2)
return True
else:
return False
|
banmo-main
|
third_party/vcnplus/models/submodule.py
|
import pdb
import torch.nn as nn
import math
import torch
from torch.nn.parameter import Parameter
import torch.nn.functional as F
from torch.nn import Module
from torch.nn.modules.conv import _ConvNd
from torch.nn.modules.utils import _quadruple
from torch.autograd import Variable
from torch.nn import Conv2d
def conv4d(data,filters,bias=None,permute_filters=True,use_half=False):
"""
This is done by stacking results of multiple 3D convolutions, and is very slow.
Taken from https://github.com/ignacio-rocco/ncnet
"""
b,c,h,w,d,t=data.size()
data=data.permute(2,0,1,3,4,5).contiguous() # permute to avoid making contiguous inside loop
# Same permutation is done with filters, unless already provided with permutation
if permute_filters:
filters=filters.permute(2,0,1,3,4,5).contiguous() # permute to avoid making contiguous inside loop
c_out=filters.size(1)
if use_half:
output = Variable(torch.HalfTensor(h,b,c_out,w,d,t),requires_grad=data.requires_grad)
else:
output = Variable(torch.zeros(h,b,c_out,w,d,t),requires_grad=data.requires_grad)
padding=filters.size(0)//2
if use_half:
Z=Variable(torch.zeros(padding,b,c,w,d,t).half())
else:
Z=Variable(torch.zeros(padding,b,c,w,d,t))
if data.is_cuda:
Z=Z.cuda(data.get_device())
output=output.cuda(data.get_device())
data_padded = torch.cat((Z,data,Z),0)
for i in range(output.size(0)): # loop on first feature dimension
# convolve with center channel of filter (at position=padding)
output[i,:,:,:,:,:]=F.conv3d(data_padded[i+padding,:,:,:,:,:],
filters[padding,:,:,:,:,:], bias=bias, stride=1, padding=padding)
        # convolve with upper/lower channels of filter (at positions [:padding] [padding+1:])
for p in range(1,padding+1):
output[i,:,:,:,:,:]=output[i,:,:,:,:,:]+F.conv3d(data_padded[i+padding-p,:,:,:,:,:],
filters[padding-p,:,:,:,:,:], bias=None, stride=1, padding=padding)
output[i,:,:,:,:,:]=output[i,:,:,:,:,:]+F.conv3d(data_padded[i+padding+p,:,:,:,:,:],
filters[padding+p,:,:,:,:,:], bias=None, stride=1, padding=padding)
output=output.permute(1,2,0,3,4,5).contiguous()
return output
class Conv4d(_ConvNd):
"""Applies a 4D convolution over an input signal composed of several input
planes.
"""
def __init__(self, in_channels, out_channels, kernel_size, bias=True, pre_permuted_filters=True):
# stride, dilation and groups !=1 functionality not tested
stride=1
dilation=1
groups=1
# zero padding is added automatically in conv4d function to preserve tensor size
padding = 0
kernel_size = _quadruple(kernel_size)
stride = _quadruple(stride)
padding = _quadruple(padding)
dilation = _quadruple(dilation)
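        # NOTE (editor): this matches the older _ConvNd signature; recent
        # PyTorch versions additionally require a padding_mode argument.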
super(Conv4d, self).__init__(
in_channels, out_channels, kernel_size, stride, padding, dilation,
False, _quadruple(0), groups, bias)
# weights will be sliced along one dimension during convolution loop
# make the looping dimension to be the first one in the tensor,
# so that we don't need to call contiguous() inside the loop
self.pre_permuted_filters=pre_permuted_filters
if self.pre_permuted_filters:
self.weight.data=self.weight.data.permute(2,0,1,3,4,5).contiguous()
self.use_half=False
# self.isbias = bias
# if not self.isbias:
# self.bn = torch.nn.BatchNorm1d(out_channels)
def forward(self, input):
out = conv4d(input, self.weight, bias=self.bias,permute_filters=not self.pre_permuted_filters,use_half=self.use_half) # filters pre-permuted in constructor
# if not self.isbias:
# b,c,u,v,h,w = out.shape
# out = self.bn(out.view(b,c,-1)).view(b,c,u,v,h,w)
return out
class fullConv4d(torch.nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, bias=True, pre_permuted_filters=True):
super(fullConv4d, self).__init__()
self.conv = Conv4d(in_channels, out_channels, kernel_size, bias=bias, pre_permuted_filters=pre_permuted_filters)
self.isbias = bias
if not self.isbias:
self.bn = torch.nn.BatchNorm1d(out_channels)
def forward(self, input):
out = self.conv(input)
if not self.isbias:
b,c,u,v,h,w = out.shape
out = self.bn(out.view(b,c,-1)).view(b,c,u,v,h,w)
return out
class butterfly4D(torch.nn.Module):
'''
    Butterfly 4D: a U-Net-style encoder-decoder over the (b, c, u, v, h, w)
    correlation volume, downsampling in (u, v) and fusing back up with
    trilinear upsampling.
'''
def __init__(self, fdima, fdimb, withbn=True, full=True,groups=1):
super(butterfly4D, self).__init__()
self.proj = nn.Sequential(projfeat4d(fdima, fdimb, 1, with_bn=withbn,groups=groups),
nn.ReLU(inplace=True),)
self.conva1 = sepConv4dBlock(fdimb,fdimb,with_bn=withbn, stride=(2,1,1),full=full,groups=groups)
self.conva2 = sepConv4dBlock(fdimb,fdimb,with_bn=withbn, stride=(2,1,1),full=full,groups=groups)
self.convb3 = sepConv4dBlock(fdimb,fdimb,with_bn=withbn, stride=(1,1,1),full=full,groups=groups)
self.convb2 = sepConv4dBlock(fdimb,fdimb,with_bn=withbn, stride=(1,1,1),full=full,groups=groups)
self.convb1 = sepConv4dBlock(fdimb,fdimb,with_bn=withbn, stride=(1,1,1),full=full,groups=groups)
#@profile
def forward(self,x):
out = self.proj(x)
b,c,u,v,h,w = out.shape # 9x9
out1 = self.conva1(out) # 5x5, 3
_,c1,u1,v1,h1,w1 = out1.shape
out2 = self.conva2(out1) # 3x3, 9
_,c2,u2,v2,h2,w2 = out2.shape
out2 = self.convb3(out2) # 3x3, 9
tout1 = F.upsample(out2.view(b,c,u2,v2,-1),(u1,v1,h2*w2),mode='trilinear').view(b,c,u1,v1,h2,w2) # 5x5
tout1 = F.upsample(tout1.view(b,c,-1,h2,w2),(u1*v1,h1,w1),mode='trilinear').view(b,c,u1,v1,h1,w1) # 5x5
out1 = tout1 + out1
out1 = self.convb2(out1)
tout = F.upsample(out1.view(b,c,u1,v1,-1),(u,v,h1*w1),mode='trilinear').view(b,c,u,v,h1,w1)
tout = F.upsample(tout.view(b,c,-1,h1,w1),(u*v,h,w),mode='trilinear').view(b,c,u,v,h,w)
out = tout + out
out = self.convb1(out)
return out
class projfeat4d(torch.nn.Module):
'''
    Channel projection for a 4D cost volume: a 1x1x1 3D convolution applied
    with (h, w) flattened into a single dimension.
'''
def __init__(self, in_planes, out_planes, stride, with_bn=True,groups=1):
super(projfeat4d, self).__init__()
self.with_bn = with_bn
self.stride = stride
self.conv1 = nn.Conv3d(in_planes, out_planes, 1, (stride,stride,1), padding=0,bias=not with_bn,groups=groups)
self.bn = nn.BatchNorm3d(out_planes)
def forward(self,x):
b,c,u,v,h,w = x.size()
x = self.conv1(x.view(b,c,u,v,h*w))
if self.with_bn:
x = self.bn(x)
_,c,u,v,_ = x.shape
x = x.view(b,c,u,v,h,w)
return x
class sepConv4d(torch.nn.Module):
'''
Separable 4d convolution block as 2 3D convolutions
'''
def __init__(self, in_planes, out_planes, stride=(1,1,1), with_bn=True, ksize=3, full=True,groups=1):
super(sepConv4d, self).__init__()
bias = not with_bn
self.isproj = False
self.stride = stride[0]
expand = 1
if with_bn:
if in_planes != out_planes:
self.isproj = True
self.proj = nn.Sequential(nn.Conv2d(in_planes, out_planes, 1, bias=bias, padding=0,groups=groups),
nn.BatchNorm2d(out_planes))
if full:
self.conv1 = nn.Sequential(nn.Conv3d(in_planes*expand, in_planes, (1,ksize,ksize), stride=(1,self.stride,self.stride), bias=bias, padding=(0,ksize//2,ksize//2),groups=groups),
nn.BatchNorm3d(in_planes))
else:
self.conv1 = nn.Sequential(nn.Conv3d(in_planes*expand, in_planes, (1,ksize,ksize), stride=1, bias=bias, padding=(0,ksize//2,ksize//2),groups=groups),
nn.BatchNorm3d(in_planes))
self.conv2 = nn.Sequential(nn.Conv3d(in_planes, in_planes*expand, (ksize,ksize,1), stride=(self.stride,self.stride,1), bias=bias, padding=(ksize//2,ksize//2,0),groups=groups),
nn.BatchNorm3d(in_planes*expand))
else:
if in_planes != out_planes:
self.isproj = True
self.proj = nn.Conv2d(in_planes, out_planes, 1, bias=bias, padding=0,groups=groups)
if full:
self.conv1 = nn.Conv3d(in_planes*expand, in_planes, (1,ksize,ksize), stride=(1,self.stride,self.stride), bias=bias, padding=(0,ksize//2,ksize//2),groups=groups)
else:
self.conv1 = nn.Conv3d(in_planes*expand, in_planes, (1,ksize,ksize), stride=1, bias=bias, padding=(0,ksize//2,ksize//2),groups=groups)
self.conv2 = nn.Conv3d(in_planes, in_planes*expand, (ksize,ksize,1), stride=(self.stride,self.stride,1), bias=bias, padding=(ksize//2,ksize//2,0),groups=groups)
self.relu = nn.ReLU(inplace=True)
#@profile
def forward(self,x):
b,c,u,v,h,w = x.shape
x = self.conv2(x.view(b,c,u,v,-1))
b,c,u,v,_ = x.shape
x = self.relu(x)
x = self.conv1(x.view(b,c,-1,h,w))
b,c,_,h,w = x.shape
if self.isproj:
x = self.proj(x.view(b,c,-1,w))
x = x.view(b,-1,u,v,h,w)
return x
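# --- Editor's sketch (not part of the original file): sepConv4d factorizes a
# kxkxkxk 4D convolution into a (k,k,1)-then-(1,k,k) pair of 3D convolutions.
def _demo_sepConv4d():
    net = sepConv4d(4, 8, with_bn=False)
    out = net(torch.randn(1, 4, 9, 9, 8, 8))   # b, c, u, v, h, w
    assert out.shape == (1, 8, 9, 9, 8, 8)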
class sepConv4dBlock(torch.nn.Module):
'''
    Residual block of two separable 4D convolutions, with an optional
    downsampling/projection shortcut.
'''
def __init__(self, in_planes, out_planes, stride=(1,1,1), with_bn=True, full=True,groups=1):
super(sepConv4dBlock, self).__init__()
if in_planes == out_planes and stride==(1,1,1):
self.downsample = None
else:
if full:
self.downsample = sepConv4d(in_planes, out_planes, stride, with_bn=with_bn,ksize=1, full=full,groups=groups)
else:
self.downsample = projfeat4d(in_planes, out_planes,stride[0], with_bn=with_bn,groups=groups)
self.conv1 = sepConv4d(in_planes, out_planes, stride, with_bn=with_bn, full=full ,groups=groups)
self.conv2 = sepConv4d(out_planes, out_planes,(1,1,1), with_bn=with_bn, full=full,groups=groups)
self.relu1 = nn.ReLU(inplace=True)
self.relu2 = nn.ReLU(inplace=True)
#@profile
def forward(self,x):
out = self.relu1(self.conv1(x))
if self.downsample:
x = self.downsample(x)
out = self.relu2(x + self.conv2(out))
return out
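# --- Editor's sketch (not part of the original file): the butterfly module
# keeps the (b, c, u, v, h, w) layout; with stride-2 blocks on (u, v) and
# trilinear fusion back up, the output matches the projected input size.
def _demo_butterfly4D():
    net = butterfly4D(8, 4, withbn=False, full=False)
    vol = torch.randn(1, 8, 9, 9, 16, 16)
    out = net(vol)
    assert out.shape == (1, 4, 9, 9, 16, 16)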
##import torch.backends.cudnn as cudnn
##cudnn.benchmark = True
#import time
##im = torch.randn(9,64,9,160,224).cuda()
##net = torch.nn.Conv3d(64, 64, 3).cuda()
##net = Conv4d(1,1,3,bias=True,pre_permuted_filters=True).cuda()
##net = sepConv4dBlock(2,2,stride=(1,1,1)).cuda()
#
##im = torch.randn(1,16,9,9,96,320).cuda()
##net = sepConv4d(16,16,with_bn=False).cuda()
#
##im = torch.randn(1,16,81,96,320).cuda()
##net = torch.nn.Conv3d(16,16,(1,3,3),padding=(0,1,1)).cuda()
#
##im = torch.randn(1,16,9,9,96*320).cuda()
##net = torch.nn.Conv3d(16,16,(3,3,1),padding=(1,1,0)).cuda()
#
##im = torch.randn(10000,10,9,9).cuda()
##net = torch.nn.Conv2d(10,10,3,padding=1).cuda()
#
##im = torch.randn(81,16,96,320).cuda()
##net = torch.nn.Conv2d(16,16,3,padding=1).cuda()
#c= int(16 *1)
#cp = int(16 *1)
#h=int(96 *4)
#w=int(320 *4)
#k=3
#im = torch.randn(1,c,h,w).cuda()
#net = torch.nn.Conv2d(c,cp,k,padding=k//2).cuda()
#
#im2 = torch.randn(cp,k*k*c).cuda()
#im1 = F.unfold(im, (k,k), padding=k//2)[0]
#
#
#net(im)
#net(im)
#torch.mm(im2,im1)
#torch.mm(im2,im1)
#torch.cuda.synchronize()
#beg = time.time()
#for i in range(100):
# net(im)
# #im1 = F.unfold(im, (k,k), padding=k//2)[0]
# torch.mm(im2,im1)
#torch.cuda.synchronize()
#print('%f'%((time.time()-beg)*10.))
|
banmo-main
|
third_party/vcnplus/models/conv4d.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
import pdb
import time
import cv2
from .submodule import pspnet, bfmodule, bfmodule_feat, conv, compute_geo_costs, get_skew_mat, get_intrinsics, F_ngransac
from .conv4d import sepConv4d, butterfly4D
class flow_reg(nn.Module):
"""
    Soft winner-take-all that selects the most likely displacement.
    Set ent=True to enable entropy output.
    Set maxdisp to adjust the maximum allowed displacement towards one side:
    maxdisp=4 searches a 9x9 region.
    Set fac to squeeze the search window horizontally:
    maxdisp=4 and fac=2 give a 9x5 search window.
"""
def __init__(self, size, ent=False, maxdisp = int(4), fac=1):
B,W,H = size
super(flow_reg, self).__init__()
self.ent = ent
self.md = maxdisp
self.fac = fac
self.truncated = True
self.wsize = 3 # by default using truncation 7x7
flowrangey = range(-maxdisp,maxdisp+1)
flowrangex = range(-int(maxdisp//self.fac),int(maxdisp//self.fac)+1)
meshgrid = np.meshgrid(flowrangex,flowrangey)
flowy = np.tile( np.reshape(meshgrid[0],[1,2*maxdisp+1,2*int(maxdisp//self.fac)+1,1,1]), (B,1,1,H,W) )
flowx = np.tile( np.reshape(meshgrid[1],[1,2*maxdisp+1,2*int(maxdisp//self.fac)+1,1,1]), (B,1,1,H,W) )
self.register_buffer('flowx',torch.Tensor(flowx))
self.register_buffer('flowy',torch.Tensor(flowy))
self.pool3d = nn.MaxPool3d((self.wsize*2+1,self.wsize*2+1,1),stride=1,padding=(self.wsize,self.wsize,0))
def forward(self, x):
b,u,v,h,w = x.shape
oldx = x
if self.truncated:
# truncated softmax
x = x.view(b,u*v,h,w)
idx = x.argmax(1)[:,np.newaxis]
if x.is_cuda:
mask = Variable(torch.cuda.HalfTensor(b,u*v,h,w)).fill_(0)
else:
mask = Variable(torch.FloatTensor(b,u*v,h,w)).fill_(0)
mask.scatter_(1,idx,1)
mask = mask.view(b,1,u,v,-1)
mask = self.pool3d(mask)[:,0].view(b,u,v,h,w)
ninf = x.clone().fill_(-np.inf).view(b,u,v,h,w)
            x = torch.where(mask.bool(), oldx, ninf)  # bool mask; newer PyTorch rejects uint8 here
else:
self.wsize = (np.sqrt(u*v)-1)/2
b,u,v,h,w = x.shape
x = F.softmax(x.view(b,-1,h,w),1).view(b,u,v,h,w)
        # guard against NaNs from the truncated softmax
        if np.isnan(x.min().detach().cpu()):
            x[torch.isnan(x)] = F.softmax(oldx[torch.isnan(x)], dim=0)
outx = torch.sum(torch.sum(x*self.flowx,1),1,keepdim=True)
outy = torch.sum(torch.sum(x*self.flowy,1),1,keepdim=True)
if self.ent:
# local
local_entropy = (-x*torch.clamp(x,1e-9,1-1e-9).log()).sum(1).sum(1)[:,np.newaxis]
if self.wsize == 0:
local_entropy[:] = 1.
else:
local_entropy /= np.log((self.wsize*2+1)**2)
# global
x = F.softmax(oldx.view(b,-1,h,w),1).view(b,u,v,h,w)
global_entropy = (-x*torch.clamp(x,1e-9,1-1e-9).log()).sum(1).sum(1)[:,np.newaxis]
global_entropy /= np.log(x.shape[1]*x.shape[2])
return torch.cat([outx,outy],1),torch.cat([local_entropy, global_entropy],1)
else:
return torch.cat([outx,outy],1),None
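# --- Editor's sketch (not part of the original file): flow_reg turns a
# (b, u, v, h, w) matching cost into a 2-channel flow map via a truncated
# soft-argmax; size is given as (B, W, H). CPU-only toy shapes assumed.
def _demo_flow_reg():
    reg = flow_reg((2, 8, 8), ent=True, maxdisp=2)
    cost = torch.randn(2, 5, 5, 8, 8)
    flow, entropy = reg(cost)
    assert flow.shape == (2, 2, 8, 8) and entropy.shape == (2, 2, 8, 8)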
class WarpModule(nn.Module):
"""
taken from https://github.com/NVlabs/PWC-Net/blob/master/PyTorch/models/PWCNet.py
"""
def __init__(self, size):
super(WarpModule, self).__init__()
B,W,H = size
# mesh grid
xx = torch.arange(0, W).view(1,-1).repeat(H,1)
yy = torch.arange(0, H).view(-1,1).repeat(1,W)
xx = xx.view(1,1,H,W).repeat(B,1,1,1)
yy = yy.view(1,1,H,W).repeat(B,1,1,1)
self.register_buffer('grid',torch.cat((xx,yy),1).float())
def forward(self, x, flo):
"""
warp an image/tensor (im2) back to im1, according to the optical flow
x: [B, C, H, W] (im2)
flo: [B, 2, H, W] flow
"""
B, C, H, W = x.size()
vgrid = self.grid + flo
# scale grid to [-1,1]
vgrid[:,0,:,:] = 2.0*vgrid[:,0,:,:]/max(W-1,1)-1.0
vgrid[:,1,:,:] = 2.0*vgrid[:,1,:,:]/max(H-1,1)-1.0
vgrid = vgrid.permute(0,2,3,1)
#output = nn.functional.grid_sample(x, vgrid)
output = nn.functional.grid_sample(x, vgrid, align_corners=True)
mask = ((vgrid[:,:,:,0].abs()<1) * (vgrid[:,:,:,1].abs()<1)) >0
return output*mask.unsqueeze(1).float(), mask
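# --- Editor's sketch (not part of the original file): with zero flow the warp
# is the identity up to the border mask (border pixels map to exactly +/-1 in
# normalized coordinates and are masked out).
def _demo_warp():
    warp = WarpModule((1, 8, 8))               # size is (B, W, H)
    x = torch.randn(1, 3, 8, 8)
    out, mask = warp(x, torch.zeros(1, 2, 8, 8))
    assert torch.allclose(out, x * mask.unsqueeze(1).float(), atol=1e-5)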
def get_grid(B,H,W):
meshgrid_base = np.meshgrid(range(0,W), range(0,H))[::-1]
basey = np.reshape(meshgrid_base[0],[1,1,1,H,W])
basex = np.reshape(meshgrid_base[1],[1,1,1,H,W])
grid = torch.tensor(np.concatenate((basex.reshape((-1,H,W,1)),basey.reshape((-1,H,W,1))),-1)).cuda().float()
return grid.view(1,1,H,W,2)
class VCN(nn.Module):
"""
VCN.
    md defines the maximum displacement at each level, following a
    coarse-to-fine warping scheme.
    fac defines the squeeze parameter for the coarsest level.
"""
def __init__(self, size, md=[4,4,4,4,4], fac=1., exp_unc=True):
super(VCN,self).__init__()
self.md = md
self.fac = fac
use_entropy = True
withbn = True
## pspnet
self.pspnet = pspnet(is_proj=False)
### Volumetric-UNet
fdima1 = 128 # 6/5/4
fdima2 = 64 # 3/2
fdimb1 = 16 # 6/5/4/3
fdimb2 = 12 # 2
full=False
self.f6 = butterfly4D(fdima1, fdimb1,withbn=withbn,full=full)
self.p6 = sepConv4d(fdimb1,fdimb1, with_bn=False, full=full)
self.f5 = butterfly4D(fdima1, fdimb1,withbn=withbn, full=full)
self.p5 = sepConv4d(fdimb1,fdimb1, with_bn=False,full=full)
self.f4 = butterfly4D(fdima1, fdimb1,withbn=withbn,full=full)
self.p4 = sepConv4d(fdimb1,fdimb1, with_bn=False,full=full)
self.f3 = butterfly4D(fdima2, fdimb1,withbn=withbn,full=full)
self.p3 = sepConv4d(fdimb1,fdimb1, with_bn=False,full=full)
full=True
self.f2 = butterfly4D(fdima2, fdimb2,withbn=withbn,full=full)
self.p2 = sepConv4d(fdimb2,fdimb2, with_bn=False,full=full)
self.flow_reg64 = flow_reg([fdimb1*size[0],size[1]//64,size[2]//64], ent=use_entropy, maxdisp=self.md[0], fac=self.fac)
self.flow_reg32 = flow_reg([fdimb1*size[0],size[1]//32,size[2]//32], ent=use_entropy, maxdisp=self.md[1])
self.flow_reg16 = flow_reg([fdimb1*size[0],size[1]//16,size[2]//16], ent=use_entropy, maxdisp=self.md[2])
self.flow_reg8 = flow_reg([fdimb1*size[0],size[1]//8,size[2]//8] , ent=use_entropy, maxdisp=self.md[3])
self.flow_reg4 = flow_reg([fdimb2*size[0],size[1]//4,size[2]//4] , ent=use_entropy, maxdisp=self.md[4])
self.warp5 = WarpModule([size[0],size[1]//32,size[2]//32])
self.warp4 = WarpModule([size[0],size[1]//16,size[2]//16])
self.warp3 = WarpModule([size[0],size[1]//8,size[2]//8])
self.warp2 = WarpModule([size[0],size[1]//4,size[2]//4])
if self.training:
self.warpx = WarpModule([size[0],size[1],size[2]])
## hypotheses fusion modules, adopted from the refinement module of PWCNet
# https://github.com/NVlabs/PWC-Net/blob/master/PyTorch/models/PWCNet.py
# c6
self.dc6_conv1 = conv(128+4*fdimb1, 128, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc6_conv2 = conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2)
self.dc6_conv3 = conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4)
self.dc6_conv4 = conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8)
self.dc6_conv5 = conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16)
self.dc6_conv6 = conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc6_conv7 = nn.Conv2d(32,2*fdimb1,kernel_size=3,stride=1,padding=1,bias=True)
# c5
self.dc5_conv1 = conv(128+4*fdimb1*2, 128, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc5_conv2 = conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2)
self.dc5_conv3 = conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4)
self.dc5_conv4 = conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8)
self.dc5_conv5 = conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16)
self.dc5_conv6 = conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc5_conv7 = nn.Conv2d(32,2*fdimb1*2,kernel_size=3,stride=1,padding=1,bias=True)
# c4
self.dc4_conv1 = conv(128+4*fdimb1*3, 128, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc4_conv2 = conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2)
self.dc4_conv3 = conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4)
self.dc4_conv4 = conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8)
self.dc4_conv5 = conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16)
self.dc4_conv6 = conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc4_conv7 = nn.Conv2d(32,2*fdimb1*3,kernel_size=3,stride=1,padding=1,bias=True)
# c3
self.dc3_conv1 = conv(64+16*fdimb1, 128, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc3_conv2 = conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2)
self.dc3_conv3 = conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4)
self.dc3_conv4 = conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8)
self.dc3_conv5 = conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16)
self.dc3_conv6 = conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc3_conv7 = nn.Conv2d(32,8*fdimb1,kernel_size=3,stride=1,padding=1,bias=True)
# c2
self.dc2_conv1 = conv(64+16*fdimb1+4*fdimb2, 128, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc2_conv2 = conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2)
self.dc2_conv3 = conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4)
self.dc2_conv4 = conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8)
self.dc2_conv5 = conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16)
self.dc2_conv6 = conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1)
self.dc2_conv7 = nn.Conv2d(32,4*2*fdimb1 + 2*fdimb2,kernel_size=3,stride=1,padding=1,bias=True)
self.dc6_conv = nn.Sequential( self.dc6_conv1,
self.dc6_conv2,
self.dc6_conv3,
self.dc6_conv4,
self.dc6_conv5,
self.dc6_conv6,
self.dc6_conv7)
self.dc5_conv = nn.Sequential( self.dc5_conv1,
self.dc5_conv2,
self.dc5_conv3,
self.dc5_conv4,
self.dc5_conv5,
self.dc5_conv6,
self.dc5_conv7)
self.dc4_conv = nn.Sequential( self.dc4_conv1,
self.dc4_conv2,
self.dc4_conv3,
self.dc4_conv4,
self.dc4_conv5,
self.dc4_conv6,
self.dc4_conv7)
self.dc3_conv = nn.Sequential( self.dc3_conv1,
self.dc3_conv2,
self.dc3_conv3,
self.dc3_conv4,
self.dc3_conv5,
self.dc3_conv6,
self.dc3_conv7)
self.dc2_conv = nn.Sequential( self.dc2_conv1,
self.dc2_conv2,
self.dc2_conv3,
self.dc2_conv4,
self.dc2_conv5,
self.dc2_conv6,
self.dc2_conv7)
## Out-of-range detection
self.dc6_convo = nn.Sequential(conv(128+4*fdimb1, 128, kernel_size=3, stride=1, padding=1, dilation=1),
conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2),
conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4),
conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8),
conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16),
conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1),
nn.Conv2d(32,1,kernel_size=3,stride=1,padding=1,bias=True))
self.dc5_convo = nn.Sequential(conv(128+2*4*fdimb1, 128, kernel_size=3, stride=1, padding=1, dilation=1),
conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2),
conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4),
conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8),
conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16),
conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1),
nn.Conv2d(32,1,kernel_size=3,stride=1,padding=1,bias=True))
self.dc4_convo = nn.Sequential(conv(128+3*4*fdimb1, 128, kernel_size=3, stride=1, padding=1, dilation=1),
conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2),
conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4),
conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8),
conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16),
conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1),
nn.Conv2d(32,1,kernel_size=3,stride=1,padding=1,bias=True))
self.dc3_convo = nn.Sequential(conv(64+16*fdimb1, 128, kernel_size=3, stride=1, padding=1, dilation=1),
conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2),
conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4),
conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8),
conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16),
conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1),
nn.Conv2d(32,1,kernel_size=3,stride=1,padding=1,bias=True))
self.dc2_convo = nn.Sequential(conv(64+16*fdimb1+4*fdimb2, 128, kernel_size=3, stride=1, padding=1, dilation=1),
conv(128, 128, kernel_size=3, stride=1, padding=2, dilation=2),
conv(128, 128, kernel_size=3, stride=1, padding=4, dilation=4),
conv(128, 96, kernel_size=3, stride=1, padding=8, dilation=8),
conv(96, 64, kernel_size=3, stride=1, padding=16, dilation=16),
conv(64, 32, kernel_size=3, stride=1, padding=1, dilation=1),
nn.Conv2d(32,1,kernel_size=3,stride=1,padding=1,bias=True))
# affine-exp
self.f3d2v1 = conv(64, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.f3d2v2 = conv(1, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.f3d2v3 = conv(1, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.f3d2v4 = conv(1, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.f3d2v5 = conv(64, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.f3d2v6 = conv(12*81, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.f3d2 = bfmodule(128-64,1)
# depth change net
self.dcnetv1 = conv(64, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.dcnetv2 = conv(1, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.dcnetv3 = conv(1, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.dcnetv4 = conv(1, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.dcnetv5 = conv(12*81, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
self.dcnetv6 = conv(4, 32, kernel_size=3, stride=1, padding=1,dilation=1) #
if exp_unc:
self.dcnet = bfmodule(128,2)
else:
self.dcnet = bfmodule(128,1)
for m in self.modules():
if isinstance(m, nn.Conv3d):
n = m.kernel_size[0] * m.kernel_size[1]*m.kernel_size[2] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if hasattr(m.bias,'data'):
m.bias.data.zero_()
self.facs = [self.fac,1,1,1,1]
self.warp_modules = nn.ModuleList([None, self.warp5, self.warp4, self.warp3, self.warp2])
self.f_modules = nn.ModuleList([self.f6, self.f5, self.f4, self.f3, self.f2])
self.p_modules = nn.ModuleList([self.p6, self.p5, self.p4, self.p3, self.p2])
self.reg_modules = nn.ModuleList([self.flow_reg64, self.flow_reg32, self.flow_reg16, self.flow_reg8, self.flow_reg4])
self.oor_modules = nn.ModuleList([self.dc6_convo, self.dc5_convo, self.dc4_convo, self.dc3_convo, self.dc2_convo])
self.fuse_modules = nn.ModuleList([self.dc6_conv, self.dc5_conv, self.dc4_conv, self.dc3_conv, self.dc2_conv])
def corrf(self, refimg_fea, targetimg_fea,maxdisp, fac=1):
if self.training:
#fast correlation function
b,c,h,w = refimg_fea.shape
targetimg_fea = F.unfold(targetimg_fea, (2*int(maxdisp)//fac+1,2*maxdisp+1), padding=(int(maxdisp)//fac,maxdisp)).view(b,c, 2*int(maxdisp)//fac+1,2*maxdisp+1,h,w).permute(0,1,3,2,4,5).contiguous()
cost = refimg_fea.view(b,c,h,w)[:,:,np.newaxis, np.newaxis]*targetimg_fea
cost = F.leaky_relu(cost, 0.1,inplace=True)
else:
#slow correlation function
b,c,height,width = refimg_fea.shape
if refimg_fea.is_cuda:
cost = Variable(torch.cuda.FloatTensor(b,c,2*maxdisp+1,2*int(maxdisp//fac)+1,height,width)).fill_(0.) # b,c,u,v,h,w
else:
cost = Variable(torch.FloatTensor(b,c,2*maxdisp+1,2*int(maxdisp//fac)+1,height,width)).fill_(0.) # b,c,u,v,h,w
for i in range(2*maxdisp+1):
ind = i-maxdisp
for j in range(2*int(maxdisp//fac)+1):
indd = j-int(maxdisp//fac)
feata = refimg_fea[:,:,max(0,-indd):height-indd,max(0,-ind):width-ind]
featb = targetimg_fea[:,:,max(0,+indd):height+indd,max(0,ind):width+ind]
diff = (feata*featb)
cost[:, :, i,j,max(0,-indd):height-indd,max(0,-ind):width-ind] = diff # standard
cost = F.leaky_relu(cost, 0.1,inplace=True)
return cost
def cost_matching(self,up_flow, c1, c2, flowh, enth, level):
"""
        up_flow: coarse flow upsampled to the current level
        c1: normalized features of image 1
        c2: normalized features of image 2
        flowh: flow hypotheses from coarser levels
        enth: entropies of those hypotheses
"""
# normalize
c1n = c1 / (c1.norm(dim=1, keepdim=True)+1e-9)
c2n = c2 / (c2.norm(dim=1, keepdim=True)+1e-9)
# cost volume
if level == 0:
warp = c2n
else:
warp,_ = self.warp_modules[level](c2n, up_flow)
feat = self.corrf(c1n,warp,self.md[level],fac=self.facs[level])
feat = self.f_modules[level](feat)
cost = self.p_modules[level](feat) # b, 16, u,v,h,w
# soft WTA
b,c,u,v,h,w = cost.shape
cost = cost.view(-1,u,v,h,w) # bx16, 9,9,h,w, also predict uncertainty from here
flowhh,enthh = self.reg_modules[level](cost) # bx16, 2, h, w
flowhh = flowhh.view(b,c,2,h,w)
if level > 0:
flowhh = flowhh + up_flow[:,np.newaxis]
flowhh = flowhh.view(b,-1,h,w) # b, 16*2, h, w
enthh = enthh.view(b,-1,h,w) # b, 16*1, h, w
# append coarse hypotheses
if level == 0:
flowh = flowhh
enth = enthh
else:
flowh = torch.cat((flowhh, F.upsample(flowh.detach()*2, [flowhh.shape[2],flowhh.shape[3]], mode='bilinear')),1) # b, k2--k2, h, w
enth = torch.cat((enthh, F.upsample(enth, [flowhh.shape[2],flowhh.shape[3]], mode='bilinear')),1)
if self.training or level==4:
x = torch.cat((enth.detach(), flowh.detach(), c1),1)
oor = self.oor_modules[level](x)[:,0]
else: oor = None
# hypotheses fusion
x = torch.cat((enth.detach(), flowh.detach(), c1),1)
va = self.fuse_modules[level](x)
va = va.view(b,-1,2,h,w)
flow = ( flowh.view(b,-1,2,h,w) * F.softmax(va,1) ).sum(1) # b, 2k, 2, h, w
return flow, flowh, enth, oor
def affine(self,pref,flow, pw=1):
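        # Per-pixel least-squares 2x2 affine fit over a (2*pw+1)^2 patch:
        # A = Ptar Pref^T (Pref Pref^T)^{-1}, solved in closed form; the scale
        # change sqrt(|det A|) serves as a motion-in-depth (expansion) cue.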
b,_,lh,lw=flow.shape
ptar = pref + flow
pw = 1
pref = F.unfold(pref, (pw*2+1,pw*2+1), padding=(pw)).view(b,2,(pw*2+1)**2,lh,lw)-pref[:,:,np.newaxis]
ptar = F.unfold(ptar, (pw*2+1,pw*2+1), padding=(pw)).view(b,2,(pw*2+1)**2,lh,lw)-ptar[:,:,np.newaxis] # b, 2,9,h,w
pref = pref.permute(0,3,4,1,2).reshape(b*lh*lw,2,(pw*2+1)**2)
ptar = ptar.permute(0,3,4,1,2).reshape(b*lh*lw,2,(pw*2+1)**2)
prefprefT = pref.matmul(pref.permute(0,2,1))
ppdet = prefprefT[:,0,0]*prefprefT[:,1,1]-prefprefT[:,1,0]*prefprefT[:,0,1]
ppinv = torch.cat((prefprefT[:,1,1:],-prefprefT[:,0,1:], -prefprefT[:,1:,0], prefprefT[:,0:1,0]),1).view(-1,2,2)/ppdet.clamp(1e-10,np.inf)[:,np.newaxis,np.newaxis]
Affine = ptar.matmul(pref.permute(0,2,1)).matmul(ppinv)
Error = (Affine.matmul(pref)-ptar).norm(2,1).mean(1).view(b,1,lh,lw)
Avol = (Affine[:,0,0]*Affine[:,1,1]-Affine[:,1,0]*Affine[:,0,1]).view(b,1,lh,lw).abs().clamp(1e-10,np.inf)
exp = Avol.sqrt()
mask = (exp>0.5) & (exp<2) & (Error<0.1)
mask = mask[:,0]
exp = exp.clamp(0.5,2)
exp[Error>0.1]=1
return exp, Error, mask
def affine_mask(self,pref,flow, pw=3):
"""
pref: reference coordinates
pw: patch width
"""
flmask = flow[:,2:]
flow = flow[:,:2]
b,_,lh,lw=flow.shape
ptar = pref + flow
pref = F.unfold(pref, (pw*2+1,pw*2+1), padding=(pw)).view(b,2,(pw*2+1)**2,lh,lw)-pref[:,:,np.newaxis]
ptar = F.unfold(ptar, (pw*2+1,pw*2+1), padding=(pw)).view(b,2,(pw*2+1)**2,lh,lw)-ptar[:,:,np.newaxis] # b, 2,9,h,w
conf_flow = flmask
conf_flow = F.unfold(conf_flow,(pw*2+1,pw*2+1), padding=(pw)).view(b,1,(pw*2+1)**2,lh,lw)
        count = conf_flow.sum(2, keepdim=True)  # torch uses keepdim, not numpy's keepdims
conf_flow = ((pw*2+1)**2)*conf_flow / count
pref = pref * conf_flow
ptar = ptar * conf_flow
pref = pref.permute(0,3,4,1,2).reshape(b*lh*lw,2,(pw*2+1)**2)
ptar = ptar.permute(0,3,4,1,2).reshape(b*lh*lw,2,(pw*2+1)**2)
prefprefT = pref.matmul(pref.permute(0,2,1))
ppdet = prefprefT[:,0,0]*prefprefT[:,1,1]-prefprefT[:,1,0]*prefprefT[:,0,1]
ppinv = torch.cat((prefprefT[:,1,1:],-prefprefT[:,0,1:], -prefprefT[:,1:,0], prefprefT[:,0:1,0]),1).view(-1,2,2)/ppdet.clamp(1e-10,np.inf)[:,np.newaxis,np.newaxis]
Affine = ptar.matmul(pref.permute(0,2,1)).matmul(ppinv)
Error = (Affine.matmul(pref)-ptar).norm(2,1).mean(1).view(b,1,lh,lw)
Avol = (Affine[:,0,0]*Affine[:,1,1]-Affine[:,1,0]*Affine[:,0,1]).view(b,1,lh,lw).abs().clamp(1e-10,np.inf)
exp = Avol.sqrt()
mask = (exp>0.5) & (exp<2) & (Error<0.2) & (flmask.bool()) & (count[:,0]>4)
mask = mask[:,0]
exp = exp.clamp(0.5,2)
exp[Error>0.2]=1
return exp, Error, mask
def get_oor_loss(self, flowl0, oor3, maxdisp, occ_mask,mask):
"""
return out-of-range loss
"""
oor3_gt = (flowl0.abs() > maxdisp).detach() # (8*self.md[3])
oor3_gt = (((oor3_gt.sum(1)>0) + occ_mask)>0).float() # oor, or occluded
#weights = oor3_gt.sum().float()/(oor3_gt.shape[0]*oor3_gt.shape[1]*oor3_gt.shape[2])
oor3_gt = oor3_gt[mask]
weights = oor3_gt.sum().float()/(oor3_gt.shape[0])
weights = oor3_gt * (1-weights) + (1-oor3_gt) * weights
loss_oor3 = F.binary_cross_entropy_with_logits(oor3[mask],oor3_gt,size_average=True, weight=weights)
return loss_oor3
def weight_parameters(self):
return [param for name, param in self.named_parameters() if 'weight' in name]
def bias_parameters(self):
return [param for name, param in self.named_parameters() if 'bias' in name]
def forward(self,im,disc_aux=None,disp_input=None):
bs = im.shape[0]//2
if self.training and disc_aux[-1]: # if only fine-tuning expansion
reset=True
self.eval()
torch.set_grad_enabled(False)
else: reset=False
c06,c05,c04,c03,c02 = self.pspnet(im)
c16 = c06[:bs]; c26 = c06[bs:]
c15 = c05[:bs]; c25 = c05[bs:]
c14 = c04[:bs]; c24 = c04[bs:]
c13 = c03[:bs]; c23 = c03[bs:]
c12 = c02[:bs]; c22 = c02[bs:]
## matching 6
flow6, flow6h, ent6h, oor6 = self.cost_matching(None, c16, c26, None, None,level=0)
## matching 5
up_flow6 = F.upsample(flow6, [im.size()[2]//32,im.size()[3]//32], mode='bilinear')*2
flow5, flow5h, ent5h, oor5 = self.cost_matching(up_flow6, c15, c25, flow6h, ent6h,level=1)
## matching 4
up_flow5 = F.upsample(flow5, [im.size()[2]//16,im.size()[3]//16], mode='bilinear')*2
flow4, flow4h, ent4h, oor4 = self.cost_matching(up_flow5, c14, c24, flow5h, ent5h,level=2)
## matching 3
up_flow4 = F.upsample(flow4, [im.size()[2]//8,im.size()[3]//8], mode='bilinear')*2
flow3, flow3h, ent3h, oor3 = self.cost_matching(up_flow4, c13, c23, flow4h, ent4h,level=3)
## matching 2
up_flow3 = F.upsample(flow3, [im.size()[2]//4,im.size()[3]//4], mode='bilinear')*2
flow2, flow2h, ent2h, oor2 = self.cost_matching(up_flow3, c12, c22, flow3h, ent3h,level=4)
if reset and disc_aux[-1] == 1:
torch.set_grad_enabled(True)
self.train()
if not self.training or disc_aux[-1]:
# expansion
b,_,h,w = flow2.shape
exp2,err2,_ = self.affine(get_grid(b,h,w)[:,0].permute(0,3,1,2).repeat(b,1,1,1).clone(), flow2.detach(),pw=1)
x = torch.cat((
self.f3d2v2(-exp2.log()),
self.f3d2v3(err2),
),1)
dchange2 = -exp2.log()+1./200*self.f3d2(x)[0]
# depth change net
iexp2 = F.upsample(dchange2.clone(), [im.size()[2],im.size()[3]], mode='bilinear')
x = torch.cat((self.dcnetv1(c12.detach()),
self.dcnetv2(dchange2.detach()),
self.dcnetv3(-exp2.log()),
self.dcnetv4(err2),
),1)
dcneto = 1./200*self.dcnet(x)[0]
dchange2 = dchange2.detach() + dcneto[:,:1]
dchange2 = F.upsample(dchange2, [im.size()[2],im.size()[3]], mode='bilinear')
if dcneto.shape[1]>1:
dc_unc = dcneto[:,1:2]
else:
dc_unc = torch.zeros_like(dcneto)
dc_unc = F.upsample(dc_unc, [im.size()[2],im.size()[3]], mode='bilinear')[:,0]
flow2 = F.upsample(flow2.detach(), [im.size()[2],im.size()[3]], mode='bilinear')*4
return flow2, oor2[0], dchange2[0,0], iexp2[0,0]
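# --- Editor's usage sketch (not part of the original file; untested, assumes a
# CUDA device and that the .submodule dependencies are importable):
def _demo_vcn():
    net = VCN([1, 256, 256], md=[4, 4, 4, 4, 4], fac=1.).cuda().eval()
    im = torch.randn(2, 3, 256, 256).cuda()    # frames 0 and 1 stacked along batch
    with torch.no_grad():
        flow, oor, dchange, iexp = net(im)     # flow is at input resolution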
|
banmo-main
|
third_party/vcnplus/models/VCNplus.py
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# Modified by Dequan Wang and Xingyi Zhou
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import math
import logging
import torch
import torch.nn as nn
from .DCNv2.DCN.dcn_v2 import DCN
import torch.utils.model_zoo as model_zoo
BN_MOMENTUM = 0.1
logger = logging.getLogger(__name__)
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1,
bias=False)
self.bn3 = nn.BatchNorm2d(planes * self.expansion,
momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
def fill_up_weights(up):
w = up.weight.data
f = math.ceil(w.size(2) / 2)
c = (2 * f - 1 - f % 2) / (2. * f)
for i in range(w.size(2)):
for j in range(w.size(3)):
w[0, 0, i, j] = \
(1 - math.fabs(i / f - c)) * (1 - math.fabs(j / f - c))
for c in range(1, w.size(0)):
w[c, 0, :, :] = w[0, 0, :, :]
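# --- Editor's note (not part of the original file): fill_up_weights sets a
# ConvTranspose2d to bilinear upsampling. For kernel_size=4, f=2 and c=0.75,
# giving the 1D profile [0.25, 0.75, 0.75, 0.25] whose outer product is the
# standard bilinear kernel, copied to every channel.
def _demo_fill_up_weights():
    up = nn.ConvTranspose2d(2, 2, 4, stride=2, padding=1, groups=2, bias=False)
    fill_up_weights(up)
    row = up.weight.data[0, 0, 1]              # 0.75 * [0.25, 0.75, 0.75, 0.25]
    assert torch.allclose(row, torch.tensor([0.1875, 0.5625, 0.5625, 0.1875]))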
def fill_fc_weights(layers):
for m in layers.modules():
if isinstance(m, nn.Conv2d):
nn.init.normal_(m.weight, std=0.001)
# torch.nn.init.kaiming_normal_(m.weight.data, nonlinearity='relu')
# torch.nn.init.xavier_normal_(m.weight.data)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
class PoseResNet(nn.Module):
def __init__(self, block, layers, heads, head_conv):
self.inplanes = 64
self.heads = heads
self.deconv_with_bias = False
super(PoseResNet, self).__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
# used for deconv layers
self.deconv_layers = self._make_deconv_layer(
3,
[256, 128, 64],
[4, 4, 4],
)
for head in self.heads:
classes = self.heads[head]
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(64, head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, classes,
kernel_size=1, stride=1,
padding=0, bias=True))
if 'hm' in head:
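                    # -2.19 = logit(0.1): bias heatmap logits towards the
                    # rare-positive prior (CenterNet convention)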
fc[-1].bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
else:
fc = nn.Conv2d(64, classes,
kernel_size=1, stride=1,
padding=0, bias=True)
if 'hm' in head:
fc.bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
self.__setattr__(head, fc)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion, momentum=BN_MOMENTUM),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def _get_deconv_cfg(self, deconv_kernel, index):
if deconv_kernel == 4:
padding = 1
output_padding = 0
elif deconv_kernel == 3:
padding = 1
output_padding = 1
elif deconv_kernel == 2:
padding = 0
output_padding = 0
return deconv_kernel, padding, output_padding
def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
        assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different from len(num_deconv_filters)'
        assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different from len(num_deconv_kernels)'
layers = []
for i in range(num_layers):
kernel, padding, output_padding = \
self._get_deconv_cfg(num_kernels[i], i)
planes = num_filters[i]
fc = DCN(self.inplanes, planes,
kernel_size=(3,3), stride=1,
padding=1, dilation=1, deformable_groups=1)
# fc = nn.Conv2d(self.inplanes, planes,
# kernel_size=3, stride=1,
# padding=1, dilation=1, bias=False)
# fill_fc_weights(fc)
up = nn.ConvTranspose2d(
in_channels=planes,
out_channels=planes,
kernel_size=kernel,
stride=2,
padding=padding,
output_padding=output_padding,
bias=self.deconv_with_bias)
fill_up_weights(up)
layers.append(fc)
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
layers.append(up)
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
self.inplanes = planes
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.deconv_layers(x)
ret = {}
for head in self.heads:
ret[head] = self.__getattr__(head)(x)
return [ret]
def init_weights(self, num_layers):
        url = model_urls['resnet{}'.format(num_layers)]
        pretrained_state_dict = model_zoo.load_url(url)
        print('=> loading pretrained model {}'.format(url))
        self.load_state_dict(pretrained_state_dict, strict=False)
        print('=> init deconv weights from normal distribution')
        for _, m in self.deconv_layers.named_modules():
            if isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
resnet_spec = {18: (BasicBlock, [2, 2, 2, 2]),
34: (BasicBlock, [3, 4, 6, 3]),
50: (Bottleneck, [3, 4, 6, 3]),
101: (Bottleneck, [3, 4, 23, 3]),
152: (Bottleneck, [3, 8, 36, 3])}
def get_pose_net(num_layers, heads, head_conv=256):
block_class, layers = resnet_spec[num_layers]
model = PoseResNet(block_class, layers, heads, head_conv=head_conv)
model.init_weights(num_layers)
return model
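# --- Editor's usage sketch (not part of the original file; requires the
# compiled DCNv2 op and network access for the pretrained ResNet weights):
def _demo_resnet_dcn():
    net = get_pose_net(18, heads={'hm': 80, 'wh': 2, 'reg': 2})
    out = net(torch.randn(1, 3, 512, 512))[0]
    assert out['hm'].shape == (1, 80, 128, 128)   # heads at 1/4 resolution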
|
banmo-main
|
third_party/vcnplus/models/networks/resnet_dcn.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import math
import logging
import numpy as np
from os.path import join
import torch
from torch import nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from .DCNv2.DCN.dcn_v2 import DCN
BN_MOMENTUM = 0.1
logger = logging.getLogger(__name__)
def get_model_url(data='imagenet', name='dla34', hash='ba72cf86'):
return join('http://dl.yf.io/dla/models', data, '{}-{}.pth'.format(name, hash))
def conv3x3(in_planes, out_planes, stride=1):
"3x3 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
def __init__(self, inplanes, planes, stride=1, dilation=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3,
stride=stride, padding=dilation,
bias=False, dilation=dilation)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
stride=1, padding=dilation,
bias=False, dilation=dilation)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.stride = stride
def forward(self, x, residual=None):
if residual is None:
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 2
def __init__(self, inplanes, planes, stride=1, dilation=1):
super(Bottleneck, self).__init__()
expansion = Bottleneck.expansion
bottle_planes = planes // expansion
self.conv1 = nn.Conv2d(inplanes, bottle_planes,
kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(bottle_planes, momentum=BN_MOMENTUM)
self.conv2 = nn.Conv2d(bottle_planes, bottle_planes, kernel_size=3,
stride=stride, padding=dilation,
bias=False, dilation=dilation)
self.bn2 = nn.BatchNorm2d(bottle_planes, momentum=BN_MOMENTUM)
self.conv3 = nn.Conv2d(bottle_planes, planes,
kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.stride = stride
def forward(self, x, residual=None):
if residual is None:
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
out += residual
out = self.relu(out)
return out
class BottleneckX(nn.Module):
expansion = 2
cardinality = 32
def __init__(self, inplanes, planes, stride=1, dilation=1):
super(BottleneckX, self).__init__()
cardinality = BottleneckX.cardinality
# dim = int(math.floor(planes * (BottleneckV5.expansion / 64.0)))
# bottle_planes = dim * cardinality
bottle_planes = planes * cardinality // 32
self.conv1 = nn.Conv2d(inplanes, bottle_planes,
kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(bottle_planes, momentum=BN_MOMENTUM)
self.conv2 = nn.Conv2d(bottle_planes, bottle_planes, kernel_size=3,
stride=stride, padding=dilation, bias=False,
dilation=dilation, groups=cardinality)
self.bn2 = nn.BatchNorm2d(bottle_planes, momentum=BN_MOMENTUM)
self.conv3 = nn.Conv2d(bottle_planes, planes,
kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.stride = stride
def forward(self, x, residual=None):
if residual is None:
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
out += residual
out = self.relu(out)
return out
class Root(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, residual):
super(Root, self).__init__()
self.conv = nn.Conv2d(
in_channels, out_channels, 1,
stride=1, bias=False, padding=(kernel_size - 1) // 2)
self.bn = nn.BatchNorm2d(out_channels, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.residual = residual
def forward(self, *x):
children = x
x = self.conv(torch.cat(x, 1))
x = self.bn(x)
if self.residual:
x += children[0]
x = self.relu(x)
return x
class Tree(nn.Module):
def __init__(self, levels, block, in_channels, out_channels, stride=1,
level_root=False, root_dim=0, root_kernel_size=1,
dilation=1, root_residual=False):
super(Tree, self).__init__()
if root_dim == 0:
root_dim = 2 * out_channels
if level_root:
root_dim += in_channels
if levels == 1:
self.tree1 = block(in_channels, out_channels, stride,
dilation=dilation)
self.tree2 = block(out_channels, out_channels, 1,
dilation=dilation)
else:
self.tree1 = Tree(levels - 1, block, in_channels, out_channels,
stride, root_dim=0,
root_kernel_size=root_kernel_size,
dilation=dilation, root_residual=root_residual)
self.tree2 = Tree(levels - 1, block, out_channels, out_channels,
root_dim=root_dim + out_channels,
root_kernel_size=root_kernel_size,
dilation=dilation, root_residual=root_residual)
if levels == 1:
self.root = Root(root_dim, out_channels, root_kernel_size,
root_residual)
self.level_root = level_root
self.root_dim = root_dim
self.downsample = None
self.project = None
self.levels = levels
if stride > 1:
self.downsample = nn.MaxPool2d(stride, stride=stride)
if in_channels != out_channels:
self.project = nn.Sequential(
nn.Conv2d(in_channels, out_channels,
kernel_size=1, stride=1, bias=False),
nn.BatchNorm2d(out_channels, momentum=BN_MOMENTUM)
)
def forward(self, x, residual=None, children=None):
children = [] if children is None else children
bottom = self.downsample(x) if self.downsample else x
residual = self.project(bottom) if self.project else bottom
if self.level_root:
children.append(bottom)
x1 = self.tree1(x, residual)
if self.levels == 1:
x2 = self.tree2(x1)
x = self.root(x2, x1, *children)
else:
children.append(x1)
x = self.tree2(x1, children=children)
return x
class DLA(nn.Module):
def __init__(self, levels, channels, num_classes=1000,
block=BasicBlock, residual_root=False, linear_root=False,num_input=14):
super(DLA, self).__init__()
self.channels = channels
self.num_classes = num_classes
self.base_layer = nn.Sequential(
nn.Conv2d(num_input, channels[0], kernel_size=7, stride=1,
padding=3, bias=False),
nn.BatchNorm2d(channels[0], momentum=BN_MOMENTUM),
nn.ReLU(inplace=True))
self.level0 = self._make_conv_level(
channels[0], channels[0], levels[0])
self.level1 = self._make_conv_level(
channels[0], channels[1], levels[1], stride=2)
self.level2 = Tree(levels[2], block, channels[1], channels[2], 2,
level_root=False,
root_residual=residual_root)
self.level3 = Tree(levels[3], block, channels[2], channels[3], 2,
level_root=True, root_residual=residual_root)
self.level4 = Tree(levels[4], block, channels[3], channels[4], 2,
level_root=True, root_residual=residual_root)
self.level5 = Tree(levels[5], block, channels[4], channels[5], 2,
level_root=True, root_residual=residual_root)
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, math.sqrt(2. / n))
# elif isinstance(m, nn.BatchNorm2d):
# m.weight.data.fill_(1)
# m.bias.data.zero_()
def _make_level(self, block, inplanes, planes, blocks, stride=1):
downsample = None
if stride != 1 or inplanes != planes:
downsample = nn.Sequential(
nn.MaxPool2d(stride, stride=stride),
nn.Conv2d(inplanes, planes,
kernel_size=1, stride=1, bias=False),
nn.BatchNorm2d(planes, momentum=BN_MOMENTUM),
)
layers = []
layers.append(block(inplanes, planes, stride, downsample=downsample))
for i in range(1, blocks):
layers.append(block(inplanes, planes))
return nn.Sequential(*layers)
def _make_conv_level(self, inplanes, planes, convs, stride=1, dilation=1):
modules = []
for i in range(convs):
modules.extend([
nn.Conv2d(inplanes, planes, kernel_size=3,
stride=stride if i == 0 else 1,
padding=dilation, bias=False, dilation=dilation),
nn.BatchNorm2d(planes, momentum=BN_MOMENTUM),
nn.ReLU(inplace=True)])
inplanes = planes
return nn.Sequential(*modules)
def forward(self, x):
y = []
x = self.base_layer(x)
for i in range(6):
x = getattr(self, 'level{}'.format(i))(x)
y.append(x)
return y
def load_pretrained_model(self, data='imagenet', name='dla34', hash='ba72cf86'):
# fc = self.fc
if name.endswith('.pth'):
model_weights = torch.load(data + name)
else:
model_url = get_model_url(data, name, hash)
model_weights = model_zoo.load_url(model_url)
num_classes = len(model_weights[list(model_weights.keys())[-1]])
self.fc = nn.Conv2d(
self.channels[-1], num_classes,
kernel_size=1, stride=1, padding=0, bias=True)
self.load_state_dict(model_weights)
# self.fc = fc
def dla34(pretrained=True, **kwargs): # DLA-34
model = DLA([1, 1, 1, 2, 2, 1],
[16, 32, 64, 128, 256, 512],
block=BasicBlock, **kwargs)
if pretrained:
model.load_pretrained_model(data='imagenet', name='dla34', hash='ba72cf86')
return model
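# --- Editor's sketch (not part of the original file): dla34 returns one
# feature map per level at strides 1, 2, 4, 8, 16, 32; num_input defaults to
# 14 in this fork (stacked flow/feature inputs), so pass 3 for plain RGB.
def _demo_dla34():
    net = dla34(pretrained=False, num_input=3)
    feats = net(torch.randn(1, 3, 64, 64))
    assert len(feats) == 6 and feats[-1].shape == (1, 512, 2, 2)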
class Identity(nn.Module):
def __init__(self):
super(Identity, self).__init__()
def forward(self, x):
return x
def fill_fc_weights(layers):
for m in layers.modules():
if isinstance(m, nn.Conv2d):
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def fill_up_weights(up):
w = up.weight.data
f = math.ceil(w.size(2) / 2)
c = (2 * f - 1 - f % 2) / (2. * f)
for i in range(w.size(2)):
for j in range(w.size(3)):
w[0, 0, i, j] = \
(1 - math.fabs(i / f - c)) * (1 - math.fabs(j / f - c))
for c in range(1, w.size(0)):
w[c, 0, :, :] = w[0, 0, :, :]
class DeformConv(nn.Module):
def __init__(self, chi, cho):
super(DeformConv, self).__init__()
self.actf = nn.Sequential(
nn.BatchNorm2d(cho, momentum=BN_MOMENTUM),
nn.ReLU(inplace=True)
)
self.conv = DCN(chi, cho, kernel_size=(3,3), stride=1, padding=1, dilation=1, deformable_groups=1)
def forward(self, x):
x = self.conv(x)
x = self.actf(x)
return x
class IDAUp(nn.Module):
def __init__(self, o, channels, up_f):
super(IDAUp, self).__init__()
for i in range(1, len(channels)):
c = channels[i]
f = int(up_f[i])
proj = DeformConv(c, o)
node = DeformConv(o, o)
up = nn.ConvTranspose2d(o, o, f * 2, stride=f,
padding=f // 2, output_padding=0,
groups=o, bias=False)
fill_up_weights(up)
setattr(self, 'proj_' + str(i), proj)
setattr(self, 'up_' + str(i), up)
setattr(self, 'node_' + str(i), node)
def forward(self, layers, startp, endp):
for i in range(startp + 1, endp):
upsample = getattr(self, 'up_' + str(i - startp))
project = getattr(self, 'proj_' + str(i - startp))
layers[i] = upsample(project(layers[i]))
node = getattr(self, 'node_' + str(i - startp))
layers[i] = node(layers[i] + layers[i - 1])
class DLAUp(nn.Module):
def __init__(self, startp, channels, scales, in_channels=None):
super(DLAUp, self).__init__()
self.startp = startp
if in_channels is None:
in_channels = channels
self.channels = channels
channels = list(channels)
scales = np.array(scales, dtype=int)
for i in range(len(channels) - 1):
j = -i - 2
setattr(self, 'ida_{}'.format(i),
IDAUp(channels[j], in_channels[j:],
scales[j:] // scales[j]))
scales[j + 1:] = scales[j]
in_channels[j + 1:] = [channels[j] for _ in channels[j + 1:]]
def forward(self, layers):
out = [layers[-1]] # start with 32
for i in range(len(layers) - self.startp - 1):
ida = getattr(self, 'ida_{}'.format(i))
            ida(layers, len(layers) - i - 2, len(layers))
out.insert(0, layers[-1])
return out
class Interpolate(nn.Module):
def __init__(self, scale, mode):
super(Interpolate, self).__init__()
self.scale = scale
self.mode = mode
def forward(self, x):
x = F.interpolate(x, scale_factor=self.scale, mode=self.mode, align_corners=False)
return x
class DLASeg(nn.Module):
def __init__(self, base_name, heads, pretrained, down_ratio, final_kernel,
last_level, head_conv, out_channel=0,num_input=14):
super(DLASeg, self).__init__()
assert down_ratio in [2, 4, 8, 16]
self.first_level = int(np.log2(down_ratio))
self.last_level = last_level
self.base = globals()[base_name](pretrained=pretrained,num_input=num_input)
channels = self.base.channels
scales = [2 ** i for i in range(len(channels[self.first_level:]))]
self.dla_up = DLAUp(self.first_level, channels[self.first_level:], scales)
if out_channel == 0:
out_channel = channels[self.first_level]
self.ida_up = IDAUp(out_channel, channels[self.first_level:self.last_level],
[2 ** i for i in range(self.last_level - self.first_level)])
self.heads = heads
for head in self.heads:
classes = self.heads[head]
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(channels[self.first_level], head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, classes,
kernel_size=final_kernel, stride=1,
padding=final_kernel // 2, bias=True))
if 'hm' in head:
fc[-1].bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
else:
fc = nn.Conv2d(channels[self.first_level], classes,
kernel_size=final_kernel, stride=1,
padding=final_kernel // 2, bias=True)
if 'hm' in head:
fc.bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
self.__setattr__(head, fc)
def forward(self, x):
x = self.base(x)
x = self.dla_up(x)
y = []
for i in range(self.last_level - self.first_level):
y.append(x[i].clone())
self.ida_up(y, 0, len(y))
z = {}
for head in self.heads:
z[head] = self.__getattr__(head)(y[-1])
return [z]
def get_pose_net(num_layers, heads, head_conv=256, down_ratio=4,num_input=14):
model = DLASeg('dla{}'.format(num_layers), heads,
pretrained=False,
#pretrained=True,
down_ratio=down_ratio,
final_kernel=1,
last_level=5,
head_conv=head_conv,num_input=num_input)
return model
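# --- Editor's usage sketch (not part of the original file; requires the
# compiled DCNv2 op; pretrained=False above avoids any download):
def _demo_dla_pose_net():
    net = get_pose_net(34, heads={'hm': 80, 'wh': 2}, num_input=3)
    out = net(torch.randn(1, 3, 128, 128))[0]
    assert out['hm'].shape == (1, 80, 32, 32)     # down_ratio=4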
|
banmo-main
|
third_party/vcnplus/models/networks/pose_dla_dcn.py
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# Modified by Xingyi Zhou
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
BN_MOMENTUM = 0.1
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1,
bias=False)
self.bn3 = nn.BatchNorm2d(planes * self.expansion,
momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class PoseResNet(nn.Module):
def __init__(self, block, layers, heads, head_conv, **kwargs):
self.inplanes = 64
self.deconv_with_bias = False
self.heads = heads
super(PoseResNet, self).__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
# used for deconv layers
self.deconv_layers = self._make_deconv_layer(
3,
[256, 256, 256],
[4, 4, 4],
)
# self.final_layer = []
for head in sorted(self.heads):
num_output = self.heads[head]
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(256, head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, num_output,
kernel_size=1, stride=1, padding=0))
else:
fc = nn.Conv2d(
in_channels=256,
out_channels=num_output,
kernel_size=1,
stride=1,
padding=0
)
self.__setattr__(head, fc)
# self.final_layer = nn.ModuleList(self.final_layer)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion, momentum=BN_MOMENTUM),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def _get_deconv_cfg(self, deconv_kernel, index):
if deconv_kernel == 4:
padding = 1
output_padding = 0
elif deconv_kernel == 3:
padding = 1
output_padding = 1
elif deconv_kernel == 2:
padding = 0
output_padding = 0
return deconv_kernel, padding, output_padding
def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
        assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different from len(num_deconv_filters)'
        assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different from len(num_deconv_kernels)'
layers = []
for i in range(num_layers):
kernel, padding, output_padding = \
self._get_deconv_cfg(num_kernels[i], i)
planes = num_filters[i]
layers.append(
nn.ConvTranspose2d(
in_channels=self.inplanes,
out_channels=planes,
kernel_size=kernel,
stride=2,
padding=padding,
output_padding=output_padding,
bias=self.deconv_with_bias))
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
self.inplanes = planes
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.deconv_layers(x)
ret = {}
for head in self.heads:
ret[head] = self.__getattr__(head)(x)
return [ret]
def init_weights(self, num_layers, pretrained=True):
if pretrained:
# print('=> init resnet deconv weights from normal distribution')
for _, m in self.deconv_layers.named_modules():
if isinstance(m, nn.ConvTranspose2d):
# print('=> init {}.weight as normal(0, 0.001)'.format(name))
# print('=> init {}.bias as 0'.format(name))
nn.init.normal_(m.weight, std=0.001)
if self.deconv_with_bias:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
# print('=> init {}.weight as 1'.format(name))
# print('=> init {}.bias as 0'.format(name))
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# print('=> init final conv weights from normal distribution')
for head in self.heads:
final_layer = self.__getattr__(head)
for i, m in enumerate(final_layer.modules()):
if isinstance(m, nn.Conv2d):
# nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
# print('=> init {}.weight as normal(0, 0.001)'.format(name))
# print('=> init {}.bias as 0'.format(name))
if m.weight.shape[0] == self.heads[head]:
if 'hm' in head:
nn.init.constant_(m.bias, -2.19)
else:
nn.init.normal_(m.weight, std=0.001)
nn.init.constant_(m.bias, 0)
#pretrained_state_dict = torch.load(pretrained)
url = model_urls['resnet{}'.format(num_layers)]
pretrained_state_dict = model_zoo.load_url(url)
print('=> loading pretrained model {}'.format(url))
self.load_state_dict(pretrained_state_dict, strict=False)
else:
            print('=> imagenet pretrained model does not exist')
print('=> please download it first')
raise ValueError('imagenet pretrained model does not exist')
resnet_spec = {18: (BasicBlock, [2, 2, 2, 2]),
34: (BasicBlock, [3, 4, 6, 3]),
50: (Bottleneck, [3, 4, 6, 3]),
101: (Bottleneck, [3, 4, 23, 3]),
152: (Bottleneck, [3, 8, 36, 3])}
def get_pose_net(num_layers, heads, head_conv):
block_class, layers = resnet_spec[num_layers]
model = PoseResNet(block_class, layers, heads, head_conv=head_conv)
model.init_weights(num_layers, pretrained=True)
return model
|
banmo-main
|
third_party/vcnplus/models/networks/msra_resnet.py
|
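A minimal usage sketch for the ResNet-based pose net above (the head layout is a hypothetical CenterNet-style configuration; get_pose_net downloads ImageNet-pretrained ResNet weights via model_zoo, so it needs network access):

import torch
heads = {'hm': 80, 'wh': 2, 'reg': 2}  # hypothetical heads: heatmap, size, offset
model = get_pose_net(num_layers=18, heads=heads, head_conv=64)
x = torch.randn(1, 3, 512, 512)
out = model(x)[0]  # forward returns a one-element list of {head: tensor} dicts
# three stride-2 deconv stages bring stride-32 features back to 1/4 resolution,
# so each head map is (1, C, 128, 128)
print({k: v.shape for k, v in out.items()})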
# ------------------------------------------------------------------------------
# This code is based on
# CornerNet (https://github.com/princeton-vl/CornerNet)
# Copyright (c) 2018, University of Michigan
# Licensed under the BSD 3-Clause License
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import torch
import torch.nn as nn
class convolution(nn.Module):
def __init__(self, k, inp_dim, out_dim, stride=1, with_bn=True):
super(convolution, self).__init__()
pad = (k - 1) // 2
self.conv = nn.Conv2d(inp_dim, out_dim, (k, k), padding=(pad, pad), stride=(stride, stride), bias=not with_bn)
self.bn = nn.BatchNorm2d(out_dim) if with_bn else nn.Sequential()
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
conv = self.conv(x)
bn = self.bn(conv)
relu = self.relu(bn)
return relu
class fully_connected(nn.Module):
def __init__(self, inp_dim, out_dim, with_bn=True):
super(fully_connected, self).__init__()
self.with_bn = with_bn
self.linear = nn.Linear(inp_dim, out_dim)
if self.with_bn:
self.bn = nn.BatchNorm1d(out_dim)
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
linear = self.linear(x)
bn = self.bn(linear) if self.with_bn else linear
relu = self.relu(bn)
return relu
class residual(nn.Module):
def __init__(self, k, inp_dim, out_dim, stride=1, with_bn=True):
super(residual, self).__init__()
self.conv1 = nn.Conv2d(inp_dim, out_dim, (3, 3), padding=(1, 1), stride=(stride, stride), bias=False)
self.bn1 = nn.BatchNorm2d(out_dim)
self.relu1 = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(out_dim, out_dim, (3, 3), padding=(1, 1), bias=False)
self.bn2 = nn.BatchNorm2d(out_dim)
self.skip = nn.Sequential(
nn.Conv2d(inp_dim, out_dim, (1, 1), stride=(stride, stride), bias=False),
nn.BatchNorm2d(out_dim)
) if stride != 1 or inp_dim != out_dim else nn.Sequential()
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
conv1 = self.conv1(x)
bn1 = self.bn1(conv1)
relu1 = self.relu1(bn1)
conv2 = self.conv2(relu1)
bn2 = self.bn2(conv2)
skip = self.skip(x)
return self.relu(bn2 + skip)
def make_layer(k, inp_dim, out_dim, modules, layer=convolution, **kwargs):
layers = [layer(k, inp_dim, out_dim, **kwargs)]
for _ in range(1, modules):
layers.append(layer(k, out_dim, out_dim, **kwargs))
return nn.Sequential(*layers)
def make_layer_revr(k, inp_dim, out_dim, modules, layer=convolution, **kwargs):
layers = []
for _ in range(modules - 1):
layers.append(layer(k, inp_dim, inp_dim, **kwargs))
layers.append(layer(k, inp_dim, out_dim, **kwargs))
return nn.Sequential(*layers)
class MergeUp(nn.Module):
def forward(self, up1, up2):
return up1 + up2
def make_merge_layer(dim):
return MergeUp()
# def make_pool_layer(dim):
# return nn.MaxPool2d(kernel_size=2, stride=2)
def make_pool_layer(dim):
return nn.Sequential()
def make_unpool_layer(dim):
return nn.Upsample(scale_factor=2)
def make_kp_layer(cnv_dim, curr_dim, out_dim):
return nn.Sequential(
convolution(3, cnv_dim, curr_dim, with_bn=False),
nn.Conv2d(curr_dim, out_dim, (1, 1))
)
def make_inter_layer(dim):
return residual(3, dim, dim)
def make_cnv_layer(inp_dim, out_dim):
return convolution(3, inp_dim, out_dim)
class kp_module(nn.Module):
def __init__(
self, n, dims, modules, layer=residual,
make_up_layer=make_layer, make_low_layer=make_layer,
make_hg_layer=make_layer, make_hg_layer_revr=make_layer_revr,
make_pool_layer=make_pool_layer, make_unpool_layer=make_unpool_layer,
make_merge_layer=make_merge_layer, **kwargs
):
super(kp_module, self).__init__()
self.n = n
curr_mod = modules[0]
next_mod = modules[1]
curr_dim = dims[0]
next_dim = dims[1]
self.up1 = make_up_layer(
3, curr_dim, curr_dim, curr_mod,
layer=layer, **kwargs
)
self.max1 = make_pool_layer(curr_dim)
self.low1 = make_hg_layer(
3, curr_dim, next_dim, curr_mod,
layer=layer, **kwargs
)
self.low2 = kp_module(
n - 1, dims[1:], modules[1:], layer=layer,
make_up_layer=make_up_layer,
make_low_layer=make_low_layer,
make_hg_layer=make_hg_layer,
make_hg_layer_revr=make_hg_layer_revr,
make_pool_layer=make_pool_layer,
make_unpool_layer=make_unpool_layer,
make_merge_layer=make_merge_layer,
**kwargs
) if self.n > 1 else \
make_low_layer(
3, next_dim, next_dim, next_mod,
layer=layer, **kwargs
)
self.low3 = make_hg_layer_revr(
3, next_dim, curr_dim, curr_mod,
layer=layer, **kwargs
)
self.up2 = make_unpool_layer(curr_dim)
self.merge = make_merge_layer(curr_dim)
def forward(self, x):
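        # hourglass recursion: up1 processes x at the current resolution (max1 is an
        # identity here, since in this file's HourglassNet configuration downsampling
        # happens via stride-2 in make_hg_layer); low1 halves the resolution, low2
        # recurses (or is the bottleneck at n == 1), low3 maps back to curr_dim,
        # up2 upsamples by 2, and merge sums the two branches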
up1 = self.up1(x)
max1 = self.max1(x)
low1 = self.low1(max1)
low2 = self.low2(low1)
low3 = self.low3(low2)
up2 = self.up2(low3)
return self.merge(up1, up2)
class exkp(nn.Module):
def __init__(
self, n, nstack, dims, modules, heads, pre=None, cnv_dim=256,
make_tl_layer=None, make_br_layer=None,
make_cnv_layer=make_cnv_layer, make_heat_layer=make_kp_layer,
make_tag_layer=make_kp_layer, make_regr_layer=make_kp_layer,
make_up_layer=make_layer, make_low_layer=make_layer,
make_hg_layer=make_layer, make_hg_layer_revr=make_layer_revr,
make_pool_layer=make_pool_layer, make_unpool_layer=make_unpool_layer,
make_merge_layer=make_merge_layer, make_inter_layer=make_inter_layer,
kp_layer=residual
):
super(exkp, self).__init__()
self.nstack = nstack
self.heads = heads
curr_dim = dims[0]
self.pre = nn.Sequential(
convolution(7, 3, 128, stride=2),
residual(3, 128, 256, stride=2)
) if pre is None else pre
self.kps = nn.ModuleList([
kp_module(
n, dims, modules, layer=kp_layer,
make_up_layer=make_up_layer,
make_low_layer=make_low_layer,
make_hg_layer=make_hg_layer,
make_hg_layer_revr=make_hg_layer_revr,
make_pool_layer=make_pool_layer,
make_unpool_layer=make_unpool_layer,
make_merge_layer=make_merge_layer
) for _ in range(nstack)
])
self.cnvs = nn.ModuleList([
make_cnv_layer(curr_dim, cnv_dim) for _ in range(nstack)
])
self.inters = nn.ModuleList([
make_inter_layer(curr_dim) for _ in range(nstack - 1)
])
self.inters_ = nn.ModuleList([
nn.Sequential(
nn.Conv2d(curr_dim, curr_dim, (1, 1), bias=False),
nn.BatchNorm2d(curr_dim)
) for _ in range(nstack - 1)
])
self.cnvs_ = nn.ModuleList([
nn.Sequential(
nn.Conv2d(cnv_dim, curr_dim, (1, 1), bias=False),
nn.BatchNorm2d(curr_dim)
) for _ in range(nstack - 1)
])
## keypoint heatmaps
for head in heads.keys():
if 'hm' in head:
module = nn.ModuleList([
make_heat_layer(
cnv_dim, curr_dim, heads[head]) for _ in range(nstack)
])
self.__setattr__(head, module)
for heat in self.__getattr__(head):
heat[-1].bias.data.fill_(-2.19)
else:
module = nn.ModuleList([
make_regr_layer(
cnv_dim, curr_dim, heads[head]) for _ in range(nstack)
])
self.__setattr__(head, module)
self.relu = nn.ReLU(inplace=True)
def forward(self, image):
# print('image shape', image.shape)
inter = self.pre(image)
outs = []
for ind in range(self.nstack):
kp_, cnv_ = self.kps[ind], self.cnvs[ind]
kp = kp_(inter)
cnv = cnv_(kp)
out = {}
for head in self.heads:
layer = self.__getattr__(head)[ind]
y = layer(cnv)
out[head] = y
outs.append(out)
if ind < self.nstack - 1:
inter = self.inters_[ind](inter) + self.cnvs_[ind](cnv)
inter = self.relu(inter)
inter = self.inters[ind](inter)
return outs
def make_hg_layer(kernel, dim0, dim1, mod, layer=convolution, **kwargs):
layers = [layer(kernel, dim0, dim1, stride=2)]
layers += [layer(kernel, dim1, dim1) for _ in range(mod - 1)]
return nn.Sequential(*layers)
class HourglassNet(exkp):
def __init__(self, heads, num_stacks=2):
n = 5
dims = [256, 256, 384, 384, 384, 512]
modules = [2, 2, 2, 2, 2, 4]
super(HourglassNet, self).__init__(
n, num_stacks, dims, modules, heads,
make_tl_layer=None,
make_br_layer=None,
make_pool_layer=make_pool_layer,
make_hg_layer=make_hg_layer,
kp_layer=residual, cnv_dim=256
)
def get_large_hourglass_net(num_layers, heads, head_conv):
model = HourglassNet(heads, 2)
return model
|
banmo-main
|
third_party/vcnplus/models/networks/large_hourglass.py
|
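A usage sketch for the stacked hourglass above (hypothetical heads; note that get_large_hourglass_net ignores num_layers and head_conv):

import torch
heads = {'hm': 80, 'wh': 2}  # hypothetical heads
model = get_large_hourglass_net(0, heads, head_conv=256)
x = torch.randn(1, 3, 512, 512)
outs = model(x)  # one {head: tensor} dict per stack (nstack=2)
# the pre module downsamples by 4, so each map is (1, C, 128, 128)
print(len(outs), {k: v.shape for k, v in outs[0].items()})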
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from os.path import join
import torch
from torch import nn
import torch.utils.model_zoo as model_zoo
import numpy as np
BatchNorm = nn.BatchNorm2d
def get_model_url(data='imagenet', name='dla34', hash='ba72cf86'):
return join('http://dl.yf.io/dla/models', data, '{}-{}.pth'.format(name, hash))
def conv3x3(in_planes, out_planes, stride=1):
"3x3 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
def __init__(self, inplanes, planes, stride=1, dilation=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3,
stride=stride, padding=dilation,
bias=False, dilation=dilation)
self.bn1 = BatchNorm(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
stride=1, padding=dilation,
bias=False, dilation=dilation)
self.bn2 = BatchNorm(planes)
self.stride = stride
def forward(self, x, residual=None):
if residual is None:
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 2
def __init__(self, inplanes, planes, stride=1, dilation=1):
super(Bottleneck, self).__init__()
expansion = Bottleneck.expansion
bottle_planes = planes // expansion
self.conv1 = nn.Conv2d(inplanes, bottle_planes,
kernel_size=1, bias=False)
self.bn1 = BatchNorm(bottle_planes)
self.conv2 = nn.Conv2d(bottle_planes, bottle_planes, kernel_size=3,
stride=stride, padding=dilation,
bias=False, dilation=dilation)
self.bn2 = BatchNorm(bottle_planes)
self.conv3 = nn.Conv2d(bottle_planes, planes,
kernel_size=1, bias=False)
self.bn3 = BatchNorm(planes)
self.relu = nn.ReLU(inplace=True)
self.stride = stride
def forward(self, x, residual=None):
if residual is None:
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
out += residual
out = self.relu(out)
return out
class BottleneckX(nn.Module):
expansion = 2
cardinality = 32
def __init__(self, inplanes, planes, stride=1, dilation=1):
super(BottleneckX, self).__init__()
cardinality = BottleneckX.cardinality
# dim = int(math.floor(planes * (BottleneckV5.expansion / 64.0)))
# bottle_planes = dim * cardinality
bottle_planes = planes * cardinality // 32
self.conv1 = nn.Conv2d(inplanes, bottle_planes,
kernel_size=1, bias=False)
self.bn1 = BatchNorm(bottle_planes)
self.conv2 = nn.Conv2d(bottle_planes, bottle_planes, kernel_size=3,
stride=stride, padding=dilation, bias=False,
dilation=dilation, groups=cardinality)
self.bn2 = BatchNorm(bottle_planes)
self.conv3 = nn.Conv2d(bottle_planes, planes,
kernel_size=1, bias=False)
self.bn3 = BatchNorm(planes)
self.relu = nn.ReLU(inplace=True)
self.stride = stride
def forward(self, x, residual=None):
if residual is None:
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
out += residual
out = self.relu(out)
return out
class Root(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, residual):
super(Root, self).__init__()
self.conv = nn.Conv2d(
in_channels, out_channels, 1,
stride=1, bias=False, padding=(kernel_size - 1) // 2)
self.bn = BatchNorm(out_channels)
self.relu = nn.ReLU(inplace=True)
self.residual = residual
def forward(self, *x):
children = x
x = self.conv(torch.cat(x, 1))
x = self.bn(x)
if self.residual:
x += children[0]
x = self.relu(x)
return x
class Tree(nn.Module):
def __init__(self, levels, block, in_channels, out_channels, stride=1,
level_root=False, root_dim=0, root_kernel_size=1,
dilation=1, root_residual=False):
super(Tree, self).__init__()
if root_dim == 0:
root_dim = 2 * out_channels
if level_root:
root_dim += in_channels
if levels == 1:
self.tree1 = block(in_channels, out_channels, stride,
dilation=dilation)
self.tree2 = block(out_channels, out_channels, 1,
dilation=dilation)
else:
self.tree1 = Tree(levels - 1, block, in_channels, out_channels,
stride, root_dim=0,
root_kernel_size=root_kernel_size,
dilation=dilation, root_residual=root_residual)
self.tree2 = Tree(levels - 1, block, out_channels, out_channels,
root_dim=root_dim + out_channels,
root_kernel_size=root_kernel_size,
dilation=dilation, root_residual=root_residual)
if levels == 1:
self.root = Root(root_dim, out_channels, root_kernel_size,
root_residual)
self.level_root = level_root
self.root_dim = root_dim
self.downsample = None
self.project = None
self.levels = levels
if stride > 1:
self.downsample = nn.MaxPool2d(stride, stride=stride)
if in_channels != out_channels:
self.project = nn.Sequential(
nn.Conv2d(in_channels, out_channels,
kernel_size=1, stride=1, bias=False),
BatchNorm(out_channels)
)
def forward(self, x, residual=None, children=None):
children = [] if children is None else children
bottom = self.downsample(x) if self.downsample else x
residual = self.project(bottom) if self.project else bottom
if self.level_root:
children.append(bottom)
x1 = self.tree1(x, residual)
if self.levels == 1:
x2 = self.tree2(x1)
x = self.root(x2, x1, *children)
else:
children.append(x1)
x = self.tree2(x1, children=children)
return x
class DLA(nn.Module):
def __init__(self, levels, channels, num_classes=1000,
block=BasicBlock, residual_root=False, return_levels=False,
pool_size=7, linear_root=False):
super(DLA, self).__init__()
self.channels = channels
self.return_levels = return_levels
self.num_classes = num_classes
self.base_layer = nn.Sequential(
nn.Conv2d(3, channels[0], kernel_size=7, stride=1,
padding=3, bias=False),
BatchNorm(channels[0]),
nn.ReLU(inplace=True))
self.level0 = self._make_conv_level(
channels[0], channels[0], levels[0])
self.level1 = self._make_conv_level(
channels[0], channels[1], levels[1], stride=2)
self.level2 = Tree(levels[2], block, channels[1], channels[2], 2,
level_root=False,
root_residual=residual_root)
self.level3 = Tree(levels[3], block, channels[2], channels[3], 2,
level_root=True, root_residual=residual_root)
self.level4 = Tree(levels[4], block, channels[3], channels[4], 2,
level_root=True, root_residual=residual_root)
self.level5 = Tree(levels[5], block, channels[4], channels[5], 2,
level_root=True, root_residual=residual_root)
self.avgpool = nn.AvgPool2d(pool_size)
self.fc = nn.Conv2d(channels[-1], num_classes, kernel_size=1,
stride=1, padding=0, bias=True)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, BatchNorm):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_level(self, block, inplanes, planes, blocks, stride=1):
downsample = None
if stride != 1 or inplanes != planes:
downsample = nn.Sequential(
nn.MaxPool2d(stride, stride=stride),
nn.Conv2d(inplanes, planes,
kernel_size=1, stride=1, bias=False),
BatchNorm(planes),
)
layers = []
layers.append(block(inplanes, planes, stride, downsample=downsample))
for i in range(1, blocks):
layers.append(block(inplanes, planes))
return nn.Sequential(*layers)
def _make_conv_level(self, inplanes, planes, convs, stride=1, dilation=1):
modules = []
for i in range(convs):
modules.extend([
nn.Conv2d(inplanes, planes, kernel_size=3,
stride=stride if i == 0 else 1,
padding=dilation, bias=False, dilation=dilation),
BatchNorm(planes),
nn.ReLU(inplace=True)])
inplanes = planes
return nn.Sequential(*modules)
def forward(self, x):
y = []
x = self.base_layer(x)
for i in range(6):
x = getattr(self, 'level{}'.format(i))(x)
y.append(x)
if self.return_levels:
return y
else:
x = self.avgpool(x)
x = self.fc(x)
x = x.view(x.size(0), -1)
return x
def load_pretrained_model(self, data='imagenet', name='dla34', hash='ba72cf86'):
fc = self.fc
if name.endswith('.pth'):
model_weights = torch.load(data + name)
else:
model_url = get_model_url(data, name, hash)
model_weights = model_zoo.load_url(model_url)
num_classes = len(model_weights[list(model_weights.keys())[-1]])
self.fc = nn.Conv2d(
self.channels[-1], num_classes,
kernel_size=1, stride=1, padding=0, bias=True)
self.load_state_dict(model_weights)
self.fc = fc
def dla34(pretrained, **kwargs): # DLA-34
model = DLA([1, 1, 1, 2, 2, 1],
[16, 32, 64, 128, 256, 512],
block=BasicBlock, **kwargs)
if pretrained:
model.load_pretrained_model(data='imagenet', name='dla34', hash='ba72cf86')
return model
def dla46_c(pretrained=None, **kwargs): # DLA-46-C
Bottleneck.expansion = 2
model = DLA([1, 1, 1, 2, 2, 1],
[16, 32, 64, 64, 128, 256],
block=Bottleneck, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla46_c')
return model
def dla46x_c(pretrained=None, **kwargs): # DLA-X-46-C
BottleneckX.expansion = 2
model = DLA([1, 1, 1, 2, 2, 1],
[16, 32, 64, 64, 128, 256],
block=BottleneckX, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla46x_c')
return model
def dla60x_c(pretrained, **kwargs): # DLA-X-60-C
BottleneckX.expansion = 2
model = DLA([1, 1, 1, 2, 3, 1],
[16, 32, 64, 64, 128, 256],
block=BottleneckX, **kwargs)
if pretrained:
model.load_pretrained_model(data='imagenet', name='dla60x_c', hash='b870c45c')
return model
def dla60(pretrained=None, **kwargs): # DLA-60
Bottleneck.expansion = 2
model = DLA([1, 1, 1, 2, 3, 1],
[16, 32, 128, 256, 512, 1024],
block=Bottleneck, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla60')
return model
def dla60x(pretrained=None, **kwargs): # DLA-X-60
BottleneckX.expansion = 2
model = DLA([1, 1, 1, 2, 3, 1],
[16, 32, 128, 256, 512, 1024],
block=BottleneckX, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla60x')
return model
def dla102(pretrained=None, **kwargs): # DLA-102
Bottleneck.expansion = 2
model = DLA([1, 1, 1, 3, 4, 1], [16, 32, 128, 256, 512, 1024],
block=Bottleneck, residual_root=True, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla102')
return model
def dla102x(pretrained=None, **kwargs): # DLA-X-102
BottleneckX.expansion = 2
model = DLA([1, 1, 1, 3, 4, 1], [16, 32, 128, 256, 512, 1024],
block=BottleneckX, residual_root=True, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla102x')
return model
def dla102x2(pretrained=None, **kwargs): # DLA-X-102 64
BottleneckX.cardinality = 64
model = DLA([1, 1, 1, 3, 4, 1], [16, 32, 128, 256, 512, 1024],
block=BottleneckX, residual_root=True, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla102x2')
return model
def dla169(pretrained=None, **kwargs): # DLA-169
Bottleneck.expansion = 2
model = DLA([1, 1, 2, 3, 5, 1], [16, 32, 128, 256, 512, 1024],
block=Bottleneck, residual_root=True, **kwargs)
if pretrained is not None:
model.load_pretrained_model(pretrained, 'dla169')
return model
def set_bn(bn):
    global BatchNorm
    BatchNorm = bn
class Identity(nn.Module):
def __init__(self):
super(Identity, self).__init__()
def forward(self, x):
return x
def fill_up_weights(up):
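    # fills a grouped ConvTranspose2d with bilinear-interpolation weights:
    # a separable triangle kernel, replicated across all output channels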
w = up.weight.data
f = math.ceil(w.size(2) / 2)
c = (2 * f - 1 - f % 2) / (2. * f)
for i in range(w.size(2)):
for j in range(w.size(3)):
w[0, 0, i, j] = \
(1 - math.fabs(i / f - c)) * (1 - math.fabs(j / f - c))
for c in range(1, w.size(0)):
w[c, 0, :, :] = w[0, 0, :, :]
class IDAUp(nn.Module):
def __init__(self, node_kernel, out_dim, channels, up_factors):
super(IDAUp, self).__init__()
self.channels = channels
self.out_dim = out_dim
for i, c in enumerate(channels):
if c == out_dim:
proj = Identity()
else:
proj = nn.Sequential(
nn.Conv2d(c, out_dim,
kernel_size=1, stride=1, bias=False),
BatchNorm(out_dim),
nn.ReLU(inplace=True))
f = int(up_factors[i])
if f == 1:
up = Identity()
else:
up = nn.ConvTranspose2d(
out_dim, out_dim, f * 2, stride=f, padding=f // 2,
output_padding=0, groups=out_dim, bias=False)
fill_up_weights(up)
setattr(self, 'proj_' + str(i), proj)
setattr(self, 'up_' + str(i), up)
for i in range(1, len(channels)):
node = nn.Sequential(
nn.Conv2d(out_dim * 2, out_dim,
kernel_size=node_kernel, stride=1,
padding=node_kernel // 2, bias=False),
BatchNorm(out_dim),
nn.ReLU(inplace=True))
setattr(self, 'node_' + str(i), node)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, BatchNorm):
m.weight.data.fill_(1)
m.bias.data.zero_()
def forward(self, layers):
assert len(self.channels) == len(layers), \
'{} vs {} layers'.format(len(self.channels), len(layers))
layers = list(layers)
for i, l in enumerate(layers):
upsample = getattr(self, 'up_' + str(i))
project = getattr(self, 'proj_' + str(i))
layers[i] = upsample(project(l))
x = layers[0]
y = []
for i in range(1, len(layers)):
node = getattr(self, 'node_' + str(i))
x = node(torch.cat([x, layers[i]], 1))
y.append(x)
return x, y
class DLAUp(nn.Module):
def __init__(self, channels, scales=(1, 2, 4, 8, 16), in_channels=None):
super(DLAUp, self).__init__()
if in_channels is None:
in_channels = channels
self.channels = channels
channels = list(channels)
scales = np.array(scales, dtype=int)
for i in range(len(channels) - 1):
j = -i - 2
setattr(self, 'ida_{}'.format(i),
IDAUp(3, channels[j], in_channels[j:],
scales[j:] // scales[j]))
scales[j + 1:] = scales[j]
in_channels[j + 1:] = [channels[j] for _ in channels[j + 1:]]
def forward(self, layers):
layers = list(layers)
assert len(layers) > 1
for i in range(len(layers) - 1):
ida = getattr(self, 'ida_{}'.format(i))
x, y = ida(layers[-i - 2:])
layers[-i - 1:] = y
return x
def fill_fc_weights(layers):
for m in layers.modules():
if isinstance(m, nn.Conv2d):
nn.init.normal_(m.weight, std=0.001)
# torch.nn.init.kaiming_normal_(m.weight.data, nonlinearity='relu')
# torch.nn.init.xavier_normal_(m.weight.data)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
class DLASeg(nn.Module):
def __init__(self, base_name, heads,
pretrained=True, down_ratio=4, head_conv=256):
super(DLASeg, self).__init__()
assert down_ratio in [2, 4, 8, 16]
self.heads = heads
self.first_level = int(np.log2(down_ratio))
self.base = globals()[base_name](
pretrained=pretrained, return_levels=True)
channels = self.base.channels
scales = [2 ** i for i in range(len(channels[self.first_level:]))]
self.dla_up = DLAUp(channels[self.first_level:], scales=scales)
'''
self.fc = nn.Sequential(
nn.Conv2d(channels[self.first_level], classes, kernel_size=1,
stride=1, padding=0, bias=True)
)
'''
for head in self.heads:
classes = self.heads[head]
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(channels[self.first_level], head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, classes,
kernel_size=1, stride=1,
padding=0, bias=True))
if 'hm' in head:
fc[-1].bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
else:
fc = nn.Conv2d(channels[self.first_level], classes,
kernel_size=1, stride=1,
padding=0, bias=True)
if 'hm' in head:
fc.bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
self.__setattr__(head, fc)
'''
up_factor = 2 ** self.first_level
if up_factor > 1:
up = nn.ConvTranspose2d(classes, classes, up_factor * 2,
stride=up_factor, padding=up_factor // 2,
output_padding=0, groups=classes,
bias=False)
fill_up_weights(up)
up.weight.requires_grad = False
else:
up = Identity()
self.up = up
self.softmax = nn.LogSoftmax(dim=1)
for m in self.fc.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, BatchNorm):
m.weight.data.fill_(1)
m.bias.data.zero_()
'''
def forward(self, x):
x = self.base(x)
x = self.dla_up(x[self.first_level:])
# x = self.fc(x)
# y = self.softmax(self.up(x))
ret = {}
for head in self.heads:
ret[head] = self.__getattr__(head)(x)
return [ret]
'''
def optim_parameters(self, memo=None):
for param in self.base.parameters():
yield param
for param in self.dla_up.parameters():
yield param
for param in self.fc.parameters():
yield param
'''
'''
def dla34up(classes, pretrained_base=None, **kwargs):
model = DLASeg('dla34', classes, pretrained_base=pretrained_base, **kwargs)
return model
def dla60up(classes, pretrained_base=None, **kwargs):
model = DLASeg('dla60', classes, pretrained_base=pretrained_base, **kwargs)
return model
def dla102up(classes, pretrained_base=None, **kwargs):
model = DLASeg('dla102', classes,
pretrained_base=pretrained_base, **kwargs)
return model
def dla169up(classes, pretrained_base=None, **kwargs):
model = DLASeg('dla169', classes,
pretrained_base=pretrained_base, **kwargs)
return model
'''
def get_pose_net(num_layers, heads, head_conv=256, down_ratio=4):
model = DLASeg('dla{}'.format(num_layers), heads,
pretrained=True,
down_ratio=down_ratio,
head_conv=head_conv)
return model
|
banmo-main
|
third_party/vcnplus/models/networks/dlav0.py
|
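A usage sketch for the DLA variant above (hypothetical heads; pretrained=True downloads ImageNet DLA-34 weights from dl.yf.io):

import torch
heads = {'hm': 80, 'reg': 2}  # hypothetical heads
model = get_pose_net(num_layers=34, heads=heads, head_conv=256, down_ratio=4)
x = torch.randn(1, 3, 512, 512)
out = model(x)[0]  # {head: tensor} dict; down_ratio=4 gives (1, C, 128, 128) maps
print({k: v.shape for k, v in out.items()})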
#!/usr/bin/env python
import os
import glob
import torch
from torch.utils.cpp_extension import CUDA_HOME
from torch.utils.cpp_extension import CppExtension
from torch.utils.cpp_extension import CUDAExtension
from setuptools import find_packages
from setuptools import setup
requirements = ["torch", "torchvision"]
def get_extensions():
this_dir = os.path.dirname(os.path.abspath(__file__))
extensions_dir = os.path.join(this_dir, "DCN", "src")
main_file = glob.glob(os.path.join(extensions_dir, "*.cpp"))
source_cpu = glob.glob(os.path.join(extensions_dir, "cpu", "*.cpp"))
source_cuda = glob.glob(os.path.join(extensions_dir, "cuda", "*.cu"))
#os.environ["CC"] = "g++"
sources = main_file + source_cpu
extension = CppExtension
extra_compile_args = {'cxx': ['-std=c++14']}
define_macros = []
#if torch.cuda.is_available() and CUDA_HOME is not None:
if torch.cuda.is_available():
extension = CUDAExtension
sources += source_cuda
define_macros += [("WITH_CUDA", None)]
extra_compile_args["nvcc"] = [
"-DCUDA_HAS_FP16=1",
"-D__CUDA_NO_HALF_OPERATORS__",
"-D__CUDA_NO_HALF_CONVERSIONS__",
"-D__CUDA_NO_HALF2_OPERATORS__",
]
else:
#raise NotImplementedError('Cuda is not available')
pass
sources = [os.path.join(extensions_dir, s) for s in sources]
include_dirs = [extensions_dir]
ext_modules = [
extension(
"_ext",
sources,
include_dirs=include_dirs,
define_macros=define_macros,
extra_compile_args=extra_compile_args,
)
]
return ext_modules
setup(
name="DCNv2",
version="0.1",
author="charlesshang",
url="https://github.com/charlesshang/DCNv2",
description="deformable convolutional networks",
packages=find_packages(exclude=("configs", "tests",)),
# install_requires=requirements,
ext_modules=get_extensions(),
cmdclass={"build_ext": torch.utils.cpp_extension.BuildExtension},
)
|
banmo-main
|
third_party/vcnplus/models/networks/DCNv2/setup.py
|
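dcn_v2.py below imports the compiled extension as _ext, so the extension must be built before the tests can run; a typical sketch (assuming a CUDA toolchain matching the installed PyTorch):

# run from the DCNv2 directory:
#   python setup.py build_ext --inplace
import _ext  # succeeds once the extension is built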
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import time
import torch
import torch.nn as nn
from torch.autograd import gradcheck
from dcn_v2 import dcn_v2_conv, DCNv2, DCN
from dcn_v2 import dcn_v2_pooling, DCNv2Pooling, DCNPooling
deformable_groups = 1
N, inC, inH, inW = 2, 2, 4, 4
outC = 2
kH, kW = 3, 3
def conv_identify(weight, bias):
weight.data.zero_()
bias.data.zero_()
o, i, h, w = weight.shape
y = h//2
x = w//2
for p in range(i):
for q in range(o):
if p == q:
weight.data[q, p, y, x] = 1.0
def check_zero_offset():
conv_offset = nn.Conv2d(inC, deformable_groups * 2 * kH * kW,
kernel_size=(kH, kW),
stride=(1, 1),
padding=(1, 1),
bias=True)
conv_mask = nn.Conv2d(inC, deformable_groups * 1 * kH * kW,
kernel_size=(kH, kW),
stride=(1, 1),
padding=(1, 1),
bias=True)
dcn_v2 = DCNv2(inC, outC, (kH, kW),
stride=1, padding=1, dilation=1,
deformable_groups=deformable_groups)
conv_offset.weight.data.zero_()
conv_offset.bias.data.zero_()
conv_mask.weight.data.zero_()
conv_mask.bias.data.zero_()
conv_identify(dcn_v2.weight, dcn_v2.bias)
input = torch.randn(N, inC, inH, inW)
offset = conv_offset(input)
mask = conv_mask(input)
mask = torch.sigmoid(mask)
output = dcn_v2(input, offset, mask)
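    # with zero offset/mask logits, mask = sigmoid(0) = 0.5 everywhere, and the
    # identity kernel set by conv_identify yields 0.5 * input; doubling recovers x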
output *= 2
d = (input - output).abs().max()
if d < 1e-10:
print('Zero offset passed')
else:
print('Zero offset failed')
print(input)
print(output)
def check_gradient_dconv():
input = torch.rand(N, inC, inH, inW) * 0.01
input.requires_grad = True
offset = torch.randn(N, deformable_groups * 2 * kW * kH, inH, inW) * 2
# offset.data.zero_()
# offset.data -= 0.5
offset.requires_grad = True
mask = torch.rand(N, deformable_groups * 1 * kW * kH, inH, inW)
# mask.data.zero_()
mask.requires_grad = True
mask = torch.sigmoid(mask)
weight = torch.randn(outC, inC, kH, kW)
weight.requires_grad = True
bias = torch.rand(outC)
bias.requires_grad = True
stride = 1
padding = 1
dilation = 1
print('check_gradient_dconv: ',
gradcheck(dcn_v2_conv, (input, offset, mask, weight, bias,
stride, padding, dilation, deformable_groups),
eps=1e-3, atol=1e-4, rtol=1e-2))
def check_pooling_zero_offset():
input = torch.randn(2, 16, 64, 64).zero_()
input[0, :, 16:26, 16:26] = 1.
input[1, :, 10:20, 20:30] = 2.
rois = torch.tensor([
[0, 65, 65, 103, 103],
[1, 81, 41, 119, 79],
]).float()
pooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=16,
no_trans=True,
group_size=1,
trans_std=0.0)
out = pooling(input, rois, input.new())
s = ', '.join(['%f' % out[i, :, :, :].mean().item()
for i in range(rois.shape[0])])
print(s)
dpooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=16,
no_trans=False,
group_size=1,
trans_std=0.0)
offset = torch.randn(20, 2, 7, 7).zero_()
dout = dpooling(input, rois, offset)
s = ', '.join(['%f' % dout[i, :, :, :].mean().item()
for i in range(rois.shape[0])])
print(s)
def check_gradient_dpooling():
input = torch.randn(2, 3, 5, 5) * 0.01
N = 4
batch_inds = torch.randint(2, (N, 1)).float()
x = torch.rand((N, 1)).float() * 15
y = torch.rand((N, 1)).float() * 15
w = torch.rand((N, 1)).float() * 10
h = torch.rand((N, 1)).float() * 10
rois = torch.cat((batch_inds, x, y, x + w, y + h), dim=1)
offset = torch.randn(N, 2, 3, 3)
input.requires_grad = True
offset.requires_grad = True
spatial_scale = 1.0 / 4
pooled_size = 3
output_dim = 3
no_trans = 0
group_size = 1
trans_std = 0.0
sample_per_part = 4
part_size = pooled_size
print('check_gradient_dpooling:',
gradcheck(dcn_v2_pooling, (input, rois, offset,
spatial_scale,
pooled_size,
output_dim,
no_trans,
group_size,
part_size,
sample_per_part,
trans_std),
eps=1e-4))
def example_dconv():
input = torch.randn(2, 64, 128, 128)
# wrap all things (offset and mask) in DCN
dcn = DCN(64, 64, kernel_size=(3, 3), stride=1,
padding=1, deformable_groups=2)
# print(dcn.weight.shape, input.shape)
output = dcn(input)
    target = output.new(*output.size())
    target.data.uniform_(-0.01, 0.01)
    error = (target - output).mean()
error.backward()
print(output.shape)
def example_dpooling():
input = torch.randn(2, 32, 64, 64)
batch_inds = torch.randint(2, (20, 1)).float()
x = torch.randint(256, (20, 1)).float()
y = torch.randint(256, (20, 1)).float()
w = torch.randint(64, (20, 1)).float()
h = torch.randint(64, (20, 1)).float()
rois = torch.cat((batch_inds, x, y, x + w, y + h), dim=1)
offset = torch.randn(20, 2, 7, 7)
input.requires_grad = True
offset.requires_grad = True
# normal roi_align
pooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=32,
no_trans=True,
group_size=1,
trans_std=0.1)
# deformable pooling
dpooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=32,
no_trans=False,
group_size=1,
trans_std=0.1)
out = pooling(input, rois, offset)
dout = dpooling(input, rois, offset)
print(out.shape)
print(dout.shape)
target_out = out.new(*out.size())
target_out.data.uniform_(-0.01, 0.01)
target_dout = dout.new(*dout.size())
target_dout.data.uniform_(-0.01, 0.01)
e = (target_out - out).mean()
e.backward()
e = (target_dout - dout).mean()
e.backward()
def example_mdpooling():
input = torch.randn(2, 32, 64, 64)
input.requires_grad = True
batch_inds = torch.randint(2, (20, 1)).float()
x = torch.randint(256, (20, 1)).float()
y = torch.randint(256, (20, 1)).float()
w = torch.randint(64, (20, 1)).float()
h = torch.randint(64, (20, 1)).float()
rois = torch.cat((batch_inds, x, y, x + w, y + h), dim=1)
# mdformable pooling (V2)
dpooling = DCNPooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=32,
no_trans=False,
group_size=1,
trans_std=0.1,
deform_fc_dim=1024)
dout = dpooling(input, rois)
target = dout.new(*dout.size())
target.data.uniform_(-0.1, 0.1)
error = (target - dout).mean()
error.backward()
print(dout.shape)
if __name__ == '__main__':
example_dconv()
example_dpooling()
example_mdpooling()
check_pooling_zero_offset()
# zero offset check
if inC == outC:
check_zero_offset()
check_gradient_dpooling()
check_gradient_dconv()
# """
# ****** Note: backward is not reentrant error may not be a serious problem,
# ****** since the max error is less than 1e-7,
# ****** Still looking for what trigger this problem
# """
|
banmo-main
|
third_party/vcnplus/models/networks/DCNv2/DCN/testcpu.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import time
import torch
import torch.nn as nn
from torch.autograd import gradcheck
from dcn_v2 import dcn_v2_conv, DCNv2, DCN
from dcn_v2 import dcn_v2_pooling, DCNv2Pooling, DCNPooling
deformable_groups = 1
N, inC, inH, inW = 2, 2, 4, 4
outC = 2
kH, kW = 3, 3
def conv_identify(weight, bias):
weight.data.zero_()
bias.data.zero_()
o, i, h, w = weight.shape
y = h//2
x = w//2
for p in range(i):
for q in range(o):
if p == q:
weight.data[q, p, y, x] = 1.0
def check_zero_offset():
conv_offset = nn.Conv2d(inC, deformable_groups * 2 * kH * kW,
kernel_size=(kH, kW),
stride=(1, 1),
padding=(1, 1),
bias=True).cuda()
conv_mask = nn.Conv2d(inC, deformable_groups * 1 * kH * kW,
kernel_size=(kH, kW),
stride=(1, 1),
padding=(1, 1),
bias=True).cuda()
dcn_v2 = DCNv2(inC, outC, (kH, kW),
stride=1, padding=1, dilation=1,
deformable_groups=deformable_groups).cuda()
conv_offset.weight.data.zero_()
conv_offset.bias.data.zero_()
conv_mask.weight.data.zero_()
conv_mask.bias.data.zero_()
conv_identify(dcn_v2.weight, dcn_v2.bias)
input = torch.randn(N, inC, inH, inW).cuda()
offset = conv_offset(input)
mask = conv_mask(input)
mask = torch.sigmoid(mask)
output = dcn_v2(input, offset, mask)
output *= 2
d = (input - output).abs().max()
if d < 1e-10:
print('Zero offset passed')
else:
print('Zero offset failed')
print(input)
print(output)
def check_gradient_dconv():
input = torch.rand(N, inC, inH, inW).cuda() * 0.01
input.requires_grad = True
offset = torch.randn(N, deformable_groups * 2 * kW * kH, inH, inW).cuda() * 2
# offset.data.zero_()
# offset.data -= 0.5
offset.requires_grad = True
mask = torch.rand(N, deformable_groups * 1 * kW * kH, inH, inW).cuda()
# mask.data.zero_()
mask.requires_grad = True
mask = torch.sigmoid(mask)
weight = torch.randn(outC, inC, kH, kW).cuda()
weight.requires_grad = True
bias = torch.rand(outC).cuda()
bias.requires_grad = True
stride = 1
padding = 1
dilation = 1
print('check_gradient_dconv: ',
gradcheck(dcn_v2_conv, (input, offset, mask, weight, bias,
stride, padding, dilation, deformable_groups),
eps=1e-3, atol=1e-4, rtol=1e-2))
def check_pooling_zero_offset():
input = torch.randn(2, 16, 64, 64).cuda().zero_()
input[0, :, 16:26, 16:26] = 1.
input[1, :, 10:20, 20:30] = 2.
rois = torch.tensor([
[0, 65, 65, 103, 103],
[1, 81, 41, 119, 79],
]).cuda().float()
pooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=16,
no_trans=True,
group_size=1,
trans_std=0.0).cuda()
out = pooling(input, rois, input.new())
s = ', '.join(['%f' % out[i, :, :, :].mean().item()
for i in range(rois.shape[0])])
print(s)
dpooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=16,
no_trans=False,
group_size=1,
trans_std=0.0).cuda()
offset = torch.randn(20, 2, 7, 7).cuda().zero_()
dout = dpooling(input, rois, offset)
s = ', '.join(['%f' % dout[i, :, :, :].mean().item()
for i in range(rois.shape[0])])
print(s)
def check_gradient_dpooling():
input = torch.randn(2, 3, 5, 5).cuda().float() * 0.01
N = 4
batch_inds = torch.randint(2, (N, 1)).cuda().float()
x = torch.rand((N, 1)).cuda().float() * 15
y = torch.rand((N, 1)).cuda().float() * 15
w = torch.rand((N, 1)).cuda().float() * 10
h = torch.rand((N, 1)).cuda().float() * 10
rois = torch.cat((batch_inds, x, y, x + w, y + h), dim=1)
offset = torch.randn(N, 2, 3, 3).cuda()
input.requires_grad = True
offset.requires_grad = True
spatial_scale = 1.0 / 4
pooled_size = 3
output_dim = 3
no_trans = 0
group_size = 1
trans_std = 0.0
sample_per_part = 4
part_size = pooled_size
print('check_gradient_dpooling:',
gradcheck(dcn_v2_pooling, (input, rois, offset,
spatial_scale,
pooled_size,
output_dim,
no_trans,
group_size,
part_size,
sample_per_part,
trans_std),
eps=1e-4))
def example_dconv():
input = torch.randn(2, 64, 128, 128).cuda()
# wrap all things (offset and mask) in DCN
dcn = DCN(64, 64, kernel_size=(3, 3), stride=1,
padding=1, deformable_groups=2).cuda()
# print(dcn.weight.shape, input.shape)
output = dcn(input)
    target = output.new(*output.size())
    target.data.uniform_(-0.01, 0.01)
    error = (target - output).mean()
error.backward()
print(output.shape)
def example_dpooling():
input = torch.randn(2, 32, 64, 64).cuda()
batch_inds = torch.randint(2, (20, 1)).cuda().float()
x = torch.randint(256, (20, 1)).cuda().float()
y = torch.randint(256, (20, 1)).cuda().float()
w = torch.randint(64, (20, 1)).cuda().float()
h = torch.randint(64, (20, 1)).cuda().float()
rois = torch.cat((batch_inds, x, y, x + w, y + h), dim=1)
offset = torch.randn(20, 2, 7, 7).cuda()
input.requires_grad = True
offset.requires_grad = True
# normal roi_align
pooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=32,
no_trans=True,
group_size=1,
trans_std=0.1).cuda()
# deformable pooling
dpooling = DCNv2Pooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=32,
no_trans=False,
group_size=1,
trans_std=0.1).cuda()
out = pooling(input, rois, offset)
dout = dpooling(input, rois, offset)
print(out.shape)
print(dout.shape)
target_out = out.new(*out.size())
target_out.data.uniform_(-0.01, 0.01)
target_dout = dout.new(*dout.size())
target_dout.data.uniform_(-0.01, 0.01)
e = (target_out - out).mean()
e.backward()
e = (target_dout - dout).mean()
e.backward()
def example_mdpooling():
input = torch.randn(2, 32, 64, 64).cuda()
input.requires_grad = True
batch_inds = torch.randint(2, (20, 1)).cuda().float()
x = torch.randint(256, (20, 1)).cuda().float()
y = torch.randint(256, (20, 1)).cuda().float()
w = torch.randint(64, (20, 1)).cuda().float()
h = torch.randint(64, (20, 1)).cuda().float()
rois = torch.cat((batch_inds, x, y, x + w, y + h), dim=1)
# mdformable pooling (V2)
dpooling = DCNPooling(spatial_scale=1.0 / 4,
pooled_size=7,
output_dim=32,
no_trans=False,
group_size=1,
trans_std=0.1,
deform_fc_dim=1024).cuda()
dout = dpooling(input, rois)
target = dout.new(*dout.size())
target.data.uniform_(-0.1, 0.1)
error = (target - dout).mean()
error.backward()
print(dout.shape)
if __name__ == '__main__':
example_dconv()
example_dpooling()
example_mdpooling()
check_pooling_zero_offset()
# zero offset check
if inC == outC:
check_zero_offset()
check_gradient_dpooling()
check_gradient_dconv()
# """
# ****** Note: backward is not reentrant error may not be a serious problem,
# ****** since the max error is less than 1e-7,
# ****** Still looking for what trigger this problem
# """
|
banmo-main
|
third_party/vcnplus/models/networks/DCNv2/DCN/testcuda.py
|
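time is imported by the tests above but never used; a minimal timing sketch for the CUDA deformable conv (hypothetical sizes, assuming the extension is built):

import time
import torch
from dcn_v2 import DCN
dcn = DCN(64, 64, kernel_size=(3, 3), stride=1, padding=1, deformable_groups=2).cuda()
x = torch.randn(8, 64, 128, 128).cuda()
torch.cuda.synchronize()
t0 = time.time()
for _ in range(10):
    y = dcn(x)
torch.cuda.synchronize()
print('%.2f ms/iter' % ((time.time() - t0) / 10 * 1e3))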
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import math
import torch
from torch import nn
from torch.autograd import Function
from torch.nn.modules.utils import _pair
from torch.autograd.function import once_differentiable
import _ext as _backend
class _DCNv2(Function):
@staticmethod
def forward(ctx, input, offset, mask, weight, bias,
stride, padding, dilation, deformable_groups):
ctx.stride = _pair(stride)
ctx.padding = _pair(padding)
ctx.dilation = _pair(dilation)
ctx.kernel_size = _pair(weight.shape[2:4])
ctx.deformable_groups = deformable_groups
output = _backend.dcn_v2_forward(input, weight, bias,
offset, mask,
ctx.kernel_size[0], ctx.kernel_size[1],
ctx.stride[0], ctx.stride[1],
ctx.padding[0], ctx.padding[1],
ctx.dilation[0], ctx.dilation[1],
ctx.deformable_groups)
ctx.save_for_backward(input, offset, mask, weight, bias)
return output
@staticmethod
@once_differentiable
def backward(ctx, grad_output):
input, offset, mask, weight, bias = ctx.saved_tensors
grad_input, grad_offset, grad_mask, grad_weight, grad_bias = \
_backend.dcn_v2_backward(input, weight,
bias,
offset, mask,
grad_output,
ctx.kernel_size[0], ctx.kernel_size[1],
ctx.stride[0], ctx.stride[1],
ctx.padding[0], ctx.padding[1],
ctx.dilation[0], ctx.dilation[1],
ctx.deformable_groups)
return grad_input, grad_offset, grad_mask, grad_weight, grad_bias,\
None, None, None, None,
dcn_v2_conv = _DCNv2.apply
class DCNv2(nn.Module):
def __init__(self, in_channels, out_channels,
kernel_size, stride, padding, dilation=1, deformable_groups=1):
super(DCNv2, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.kernel_size = _pair(kernel_size)
self.stride = _pair(stride)
self.padding = _pair(padding)
self.dilation = _pair(dilation)
self.deformable_groups = deformable_groups
self.weight = nn.Parameter(torch.Tensor(
out_channels, in_channels, *self.kernel_size))
self.bias = nn.Parameter(torch.Tensor(out_channels))
self.reset_parameters()
def reset_parameters(self):
n = self.in_channels
for k in self.kernel_size:
n *= k
stdv = 1. / math.sqrt(n)
self.weight.data.uniform_(-stdv, stdv)
self.bias.data.zero_()
def forward(self, input, offset, mask):
assert 2 * self.deformable_groups * self.kernel_size[0] * self.kernel_size[1] == \
offset.shape[1]
assert self.deformable_groups * self.kernel_size[0] * self.kernel_size[1] == \
mask.shape[1]
return dcn_v2_conv(input, offset, mask,
self.weight,
self.bias,
self.stride,
self.padding,
self.dilation,
self.deformable_groups)
class DCN(DCNv2):
def __init__(self, in_channels, out_channels,
kernel_size, stride, padding,
dilation=1, deformable_groups=1):
super(DCN, self).__init__(in_channels, out_channels,
kernel_size, stride, padding, dilation, deformable_groups)
channels_ = self.deformable_groups * 3 * self.kernel_size[0] * self.kernel_size[1]
self.conv_offset_mask = nn.Conv2d(self.in_channels,
channels_,
kernel_size=self.kernel_size,
stride=self.stride,
padding=self.padding,
bias=True)
self.init_offset()
def init_offset(self):
self.conv_offset_mask.weight.data.zero_()
self.conv_offset_mask.bias.data.zero_()
def forward(self, input):
out = self.conv_offset_mask(input)
o1, o2, mask = torch.chunk(out, 3, dim=1)
offset = torch.cat((o1, o2), dim=1)
mask = torch.sigmoid(mask)
return dcn_v2_conv(input, offset, mask,
self.weight, self.bias,
self.stride,
self.padding,
self.dilation,
self.deformable_groups)
class _DCNv2Pooling(Function):
@staticmethod
def forward(ctx, input, rois, offset,
spatial_scale,
pooled_size,
output_dim,
no_trans,
group_size=1,
part_size=None,
sample_per_part=4,
trans_std=.0):
ctx.spatial_scale = spatial_scale
ctx.no_trans = int(no_trans)
ctx.output_dim = output_dim
ctx.group_size = group_size
ctx.pooled_size = pooled_size
ctx.part_size = pooled_size if part_size is None else part_size
ctx.sample_per_part = sample_per_part
ctx.trans_std = trans_std
output, output_count = \
_backend.dcn_v2_psroi_pooling_forward(input, rois, offset,
ctx.no_trans, ctx.spatial_scale,
ctx.output_dim, ctx.group_size,
ctx.pooled_size, ctx.part_size,
ctx.sample_per_part, ctx.trans_std)
ctx.save_for_backward(input, rois, offset, output_count)
return output
@staticmethod
@once_differentiable
def backward(ctx, grad_output):
input, rois, offset, output_count = ctx.saved_tensors
grad_input, grad_offset = \
_backend.dcn_v2_psroi_pooling_backward(grad_output,
input,
rois,
offset,
output_count,
ctx.no_trans,
ctx.spatial_scale,
ctx.output_dim,
ctx.group_size,
ctx.pooled_size,
ctx.part_size,
ctx.sample_per_part,
ctx.trans_std)
return grad_input, None, grad_offset, \
None, None, None, None, None, None, None, None
dcn_v2_pooling = _DCNv2Pooling.apply
class DCNv2Pooling(nn.Module):
def __init__(self,
spatial_scale,
pooled_size,
output_dim,
no_trans,
group_size=1,
part_size=None,
sample_per_part=4,
trans_std=.0):
super(DCNv2Pooling, self).__init__()
self.spatial_scale = spatial_scale
self.pooled_size = pooled_size
self.output_dim = output_dim
self.no_trans = no_trans
self.group_size = group_size
self.part_size = pooled_size if part_size is None else part_size
self.sample_per_part = sample_per_part
self.trans_std = trans_std
def forward(self, input, rois, offset):
assert input.shape[1] == self.output_dim
if self.no_trans:
offset = input.new()
return dcn_v2_pooling(input, rois, offset,
self.spatial_scale,
self.pooled_size,
self.output_dim,
self.no_trans,
self.group_size,
self.part_size,
self.sample_per_part,
self.trans_std)
class DCNPooling(DCNv2Pooling):
def __init__(self,
spatial_scale,
pooled_size,
output_dim,
no_trans,
group_size=1,
part_size=None,
sample_per_part=4,
trans_std=.0,
deform_fc_dim=1024):
super(DCNPooling, self).__init__(spatial_scale,
pooled_size,
output_dim,
no_trans,
group_size,
part_size,
sample_per_part,
trans_std)
self.deform_fc_dim = deform_fc_dim
if not no_trans:
self.offset_mask_fc = nn.Sequential(
nn.Linear(self.pooled_size * self.pooled_size *
self.output_dim, self.deform_fc_dim),
nn.ReLU(inplace=True),
nn.Linear(self.deform_fc_dim, self.deform_fc_dim),
nn.ReLU(inplace=True),
nn.Linear(self.deform_fc_dim, self.pooled_size *
self.pooled_size * 3)
)
self.offset_mask_fc[4].weight.data.zero_()
self.offset_mask_fc[4].bias.data.zero_()
def forward(self, input, rois):
offset = input.new()
if not self.no_trans:
# do roi_align first
n = rois.shape[0]
roi = dcn_v2_pooling(input, rois, offset,
self.spatial_scale,
self.pooled_size,
self.output_dim,
True, # no trans
self.group_size,
self.part_size,
self.sample_per_part,
self.trans_std)
# build mask and offset
offset_mask = self.offset_mask_fc(roi.view(n, -1))
offset_mask = offset_mask.view(
n, 3, self.pooled_size, self.pooled_size)
o1, o2, mask = torch.chunk(offset_mask, 3, dim=1)
offset = torch.cat((o1, o2), dim=1)
mask = torch.sigmoid(mask)
# do pooling with offset and mask
return dcn_v2_pooling(input, rois, offset,
self.spatial_scale,
self.pooled_size,
self.output_dim,
self.no_trans,
self.group_size,
self.part_size,
self.sample_per_part,
self.trans_std) * mask
# only roi_align
return dcn_v2_pooling(input, rois, offset,
self.spatial_scale,
self.pooled_size,
self.output_dim,
self.no_trans,
self.group_size,
self.part_size,
self.sample_per_part,
self.trans_std)
|
banmo-main
|
third_party/vcnplus/models/networks/DCNv2/DCN/dcn_v2.py
|
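DCN above bundles the offset/mask branch internally, so it can replace a padded 3x3 nn.Conv2d directly; a minimal sketch (assuming the compiled extension and a CUDA device):

import torch
from dcn_v2 import DCN
layer = DCN(64, 128, kernel_size=(3, 3), stride=1, padding=1).cuda()
x = torch.randn(2, 64, 32, 32).cuda()
y = layer(x)
print(y.shape)  # torch.Size([2, 128, 32, 32]), same spatial size as a padded 3x3 conv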
from .dcn_v2 import *
|
banmo-main
|
third_party/vcnplus/models/networks/DCNv2/DCN/__init__.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import sys
sys.path.insert(0,'third_party')
sys.path.insert(0,'./')
import numpy as np
import trimesh
import torch
import cv2
import pdb
from scipy.spatial.transform import Rotation as R
from utils.io import mkdir_p
import argparse
parser = argparse.ArgumentParser(description='render camera trajectories')
parser.add_argument('--outdir', default='tmp/traj',
help='output dir')
parser.add_argument('--nframes', default=90,type=int,
help='number of frames to render')
parser.add_argument('--alpha', default=0.5,type=float,
help='0-1, percentage of a full cycle')
parser.add_argument('--init_a', default=0.5,type=float,
help='0-1, percentage of a full cycle for initial pose')
parser.add_argument('--focal', default=2,type=float,
help='focal length')
parser.add_argument('--d_obj', default=3,type=float,
help='object depth')
parser.add_argument('--can_rand', dest='can_rand',action='store_true',
                    help='randomize canonical space')
parser.add_argument('--img_size', default=512,type=int,
help='image size')
args = parser.parse_args()
## io
img_size = args.img_size
d_obj = args.d_obj
mkdir_p(args.outdir)
rot_rand = torch.Tensor(R.random().as_matrix()).cuda()
# to be compatible with other seqs
base_rmat = torch.eye(3).cuda()
base_rmat[0,0] = -1
base_rmat[1,1] = -1
for i in range(0,args.nframes):
# set cameras
    #rotx = np.random.rand()
    rotx = 0.
    roty = args.init_a*6.28 + args.alpha*6.28*i/args.nframes  # 6.28 ~ 2*pi: fractions of a full turn
rotz = 0.
Rmat = cv2.Rodrigues(np.asarray([rotx, roty, rotz]))[0]
Rmat = torch.Tensor(Rmat).cuda()
# random rot
if args.can_rand:
Rmat = Rmat.matmul(rot_rand.T)
Rmat = Rmat.matmul(base_rmat)
Tmat = torch.Tensor([0,0,d_obj] ).cuda()
K = torch.Tensor([args.focal,args.focal,0,0] ).cuda()
Kimg = torch.Tensor([args.focal*img_size/2.,args.focal*img_size/2.,img_size/2.,img_size/2.] ).cuda()
# add RTK: [R_3x3|T_3x1]
# [fx,fy,px,py], to the ndc space
rtk = np.zeros((4,4))
rtk[:3,:3] = Rmat.cpu().numpy()
rtk[:3, 3] = Tmat.cpu().numpy()
rtk[3, :] = Kimg .cpu().numpy()
np.savetxt('%s/%05d.txt' %(args.outdir,i),rtk)
|
banmo-main
|
scripts/misc/generate_traj.py
|
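The script writes one 4x4 text file per frame: rows 0-2 hold the extrinsics [R|t] and row 3 holds the pixel-space intrinsics [fx, fy, px, py]. A minimal reader sketch (hypothetical output path):

import numpy as np
rtk = np.loadtxt('tmp/traj/00000.txt')
Rmat, Tvec = rtk[:3, :3], rtk[:3, 3]
fx, fy, px, py = rtk[3]
K = np.array([[fx, 0, px], [0, fy, py], [0, 0, 1]])  # intrinsics as a 3x3 matrix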
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
# python scripts/add_cam_noise.py cam-files/cse-ama/ 30
import cv2
import numpy as np
import pdb
import sys
import glob
import os
cam_dir=sys.argv[1]
std_rot=float(sys.argv[2]) # deg
seqname=cam_dir.split('/')[-2]
std=np.pi/180*std_rot
odir='%s-gauss-%d'%(cam_dir.rsplit('/',1)[-2],std_rot)
os.makedirs(odir, exist_ok=True)
camlist = glob.glob('%s/*.txt'%(cam_dir))
camlist = sorted(camlist)
for idx,path in enumerate(camlist):
rtk = np.loadtxt(path)
rtk_mod = rtk.copy()
# random rot
rot_rand = np.random.normal(0,std,3)
rot_rand = cv2.Rodrigues(rot_rand)[0]
rtk_mod[:3,:3] = rot_rand.dot(rtk_mod[:3,:3])
    rtk_mod[:2,3] = 0  # reset x/y translation
    rtk_mod[2,3] = 3   # place the object at a fixed depth
fid = path.rsplit('/',1)[1]
path_mod = '%s/%s'%(odir,fid)
np.savetxt(path_mod, rtk_mod)
print(rtk)
|
banmo-main
|
scripts/misc/add_cam_noise.py
|
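A quick sanity sketch for the perturbation above: axis-angle noise passed through cv2.Rodrigues always yields a valid rotation, so the perturbed extrinsics stay orthonormal (30 degrees, matching the usage line at the top of the file):

import numpy as np
import cv2
std = np.pi / 180 * 30
rot = cv2.Rodrigues(np.random.normal(0, std, 3))[0]
print(np.allclose(rot @ rot.T, np.eye(3)))  # True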
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
# from: https://gist.github.com/adewes/5884820
import random
def get_random_color(pastel_factor = 0.5):
return [(x+pastel_factor)/(1.0+pastel_factor) for x in [random.uniform(0,1.0) for i in [1,2,3]]]
def color_distance(c1,c2):
return sum([abs(x[0]-x[1]) for x in zip(c1,c2)])
def generate_new_color(existing_colors,pastel_factor = 0.5):
max_distance = None
best_color = None
for i in range(0,100):
color = get_random_color(pastel_factor = pastel_factor)
if not existing_colors:
return color
best_distance = min([color_distance(color,c) for c in existing_colors])
if not max_distance or best_distance > max_distance:
max_distance = best_distance
best_color = color
return best_color
if __name__ == '__main__':
#To make your color choice reproducible, uncomment the following line:
random.seed(10)
colors = []
for i in range(0,65):
colors.append(generate_new_color(colors,pastel_factor = 0.1))
import numpy as np
print((np.asarray(colors)*255).astype(int))
|
banmo-main
|
scripts/misc/random_colors.py
|
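A short usage sketch for the greedy palette generator above; each new color maximizes its minimum distance to the colors picked so far:

import random
random.seed(0)  # reproducible palette
palette = []
for _ in range(5):
    palette.append(generate_new_color(palette, pastel_factor=0.5))
print(palette)  # five mutually distant pastel RGB triples in [0, 1]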
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(sys.path[0])))
os.environ["PYOPENGL_PLATFORM"] = "egl" #opengl seems to only work with TPU
sys.path.insert(0,'third_party')
import subprocess
import imageio
import glob
from utils.io import save_vid
import matplotlib.pyplot as plt
import numpy as np
import torch
import cv2
import pdb
import argparse
import trimesh
from nnutils.geom_utils import obj_to_cam, pinhole_cam, obj2cam_np
from dataloader import frameloader
import pyrender
from pyrender import IntrinsicsCamera,Mesh, Node, Scene,OffscreenRenderer
import configparser
import matplotlib
cmap = matplotlib.cm.get_cmap('cool')
from utils.io import config_to_dataloader, draw_cams, str_to_frame, \
extract_data_info
import pytorch3d
import pytorch3d.ops
parser = argparse.ArgumentParser(description='render mesh')
parser.add_argument('--testdir', default='',
help='path to test dir')
parser.add_argument('--seqname', default='camel',
help='sequence to test')
parser.add_argument('--outpath', default='/data/gengshay/output.gif',
help='output path')
parser.add_argument('--overlay', default='no',
help='whether to overlay with the input')
parser.add_argument('--cam_type', default='perspective',
help='camera model, orthographic or perspective')
parser.add_argument('--vis_bones', dest='vis_bones',action='store_true',
help='whether show transparent surface and vis bones')
parser.add_argument('--vis_cam', dest='vis_cam',action='store_true',
help='whether show camera trajectory')
parser.add_argument('--vis_traj', dest='vis_traj', action='store_true',
help='whether show trajectory of vertices')
parser.add_argument('--append_img', default='no',
help='whether append images before the seq')
parser.add_argument('--append_render', default='yes',
help='whether append renderings')
parser.add_argument('--nosmooth', dest='smooth', action='store_false',
help='whether to smooth vertex colors and positions')
parser.add_argument('--corresp', dest='corresp', action='store_true',
help='whether to render correspondence')
parser.add_argument('--floor', dest='floor', action='store_true',
help='whether to add floor')
parser.add_argument('--show_dp', dest='show_dp',action='store_true',
                    help='whether to visualize densepose if available')
parser.add_argument('--freeze', dest='freeze',action='store_true',
                    help='freeze object at first frame')
parser.add_argument('--rest', dest='rest',action='store_true',
help='render rest object shape')
parser.add_argument('--vp', default=0, type=int,
help='which viewpoint to render 0,1,2')
parser.add_argument('--gtdir', default='',
help='path to gt dir')
parser.add_argument('--test_frames', default='9',
                    help='a list of video indices, e.g. {0,1,2}, or a number of frames, e.g. 30')
parser.add_argument('--root_frames', default='',
                    help='a list of video indices, e.g. {0,1,2}, or a number of frames, e.g. 30')
parser.add_argument('--gt_pmat',
default='/private/home/gengshany/data/AMA/T_swing/calibration/Camera1.Pmat.cal',
help='path to ama projection matrix, evaluation only')
parser.add_argument('--vis_gtmesh', dest='vis_gtmesh', action='store_true',
help='whether to visualize ground-truth mesh in eval')
parser.add_argument('--clean', dest='clean', action='store_true',
help='whether to use cc to clean up input mesh')
parser.add_argument('--gray_color', dest='gray_color', action='store_true',
help='whether to overwrite color with gray')
args = parser.parse_args()
gt_meshes = [trimesh.load(i, process=False) for i in sorted( glob.glob('%s/*.obj'%(args.gtdir)) )]
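# Expected testdir layout (illustrative note, inferred from the loads in main()
# below): per-frame files named {testdir}/{seqname}-mesh-{fr:05d}.obj,
# {seqname}-cam-{fr:05d}.txt and {seqname}-bone-{fr:05d}.obj, plus
# mesh-rest.obj when --rest is set.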
def main():
print(args.testdir)
if args.rest:
mesh_rest = trimesh.load('%s/mesh-rest.obj'%(args.testdir),process=False)
# read all the data
all_anno = []
all_mesh = []
all_bone = []
all_cam = []
all_fr = []
# eval dataloader
opts_dict = {}
opts_dict['seqname'] = args.seqname
opts_dict['img_size'] = 512 # dummy value
opts_dict['rtk_path'] = ''
evalloader = frameloader.eval_loader(opts_dict)
data_info = extract_data_info(evalloader)
idx_render = str_to_frame(args.test_frames, data_info)
if args.root_frames=='': idx_render_root = idx_render
else: idx_render_root = str_to_frame(args.root_frames, data_info)
# get eval frames
imglist = []
for dataset in evalloader.dataset.datasets:
imglist += dataset.imglist[:-1] # excluding the last frame
rootlist =[imglist[i] for i in idx_render_root]
imglist = [imglist[i] for i in idx_render]
seqname_list = []
    ## subsample frames ## NOTE: this may cause a bug at nvs ##
#if len(imglist)>150:
# imglist = imglist[::(len(imglist)//150)]
rootlist = [rootlist[i] for i in \
np.linspace(0,len(rootlist)-1,len(imglist),dtype=int)]
for idx,name in enumerate(imglist):
rgb_img = cv2.imread(name)
if args.show_dp:
# replace with densepose
name1, name2 = name.rsplit('/',1)
dppath = '%s/vis-%s'%(name1.replace('JPEGImages', 'Densepose'), name2)
if os.path.exists(dppath):
rgb_img = cv2.resize(cv2.imread(dppath), rgb_img.shape[:2][::-1])
try: sil_img = cv2.imread(name.replace('JPEGImages', 'Annotations').replace('.jpg', '.png'),0)[:,:,None]
except: sil_img = np.zeros(rgb_img.shape)[:,:,0]
all_anno.append([rgb_img,sil_img,0,0,name])
seqname = name.split('/')[-2]
seqname_list.append(seqname)
fr = int(name.split('/')[-1].split('.')[-2])
all_fr.append(fr)
print('%s/%d'%(seqname, fr))
if args.append_render=="yes":
try:
mesh = trimesh.load('%s/%s-mesh-%05d.obj'%(args.testdir, seqname, fr),process=False)
if args.clean:
# keep the largest mesh
mesh = [i for i in mesh.split(only_watertight=False)]
mesh = sorted(mesh, key=lambda x:x.vertices.shape[0])
mesh = mesh[-1]
if args.gray_color:
mesh.visual.vertex_colors[:,:3]=128 # necessary for color override
all_mesh.append(mesh)
name_root = rootlist[idx]
seqname_root = name_root.split('/')[-2]
fr_root = int(name_root.split('/')[-1].split('.')[-2])
cam = np.loadtxt('%s/%s-cam-%05d.txt'%(args.testdir, seqname_root, fr_root))
all_cam.append(cam)
bone = trimesh.load('%s/%s-bone-%05d.obj'%(args.testdir, seqname,fr),process=False)
all_bone.append(bone)
except: print('no mesh found')
else:
# dummy variable
mesh = trimesh.creation.uv_sphere(radius=1,count=[2, 2])
all_mesh.append(mesh)
# process bones, trajectories and cameras
num_original_verts = []
num_original_faces = []
pts_trajs = []
col_trajs = []
    traj_len = len(all_mesh) # TODO: should be dependent on the seqname
pts_num = len(all_mesh[0].vertices)
traj_num = min(1000, pts_num)
traj_idx = np.random.choice(pts_num, traj_num)
scene_scale = np.abs(all_mesh[0].vertices).max()
for i in range(len(all_mesh)):
if args.vis_bones:
all_mesh[i].visual.vertex_colors[:,-1]=254 # necessary for color override
num_original_verts.append( all_mesh[i].vertices.shape[0])
num_original_faces.append( all_mesh[i].faces.shape[0] )
try: bone=all_bone[i]
except: bone=trimesh.Trimesh()
all_mesh[i] = trimesh.util.concatenate([all_mesh[i], bone])
# change color according to time
if args.vis_traj:
pts_traj = np.zeros((traj_len, traj_num,2,3))
col_traj = np.zeros((traj_len, traj_num,2,4))
for j in range(traj_len):
if i-j-1<0 or seqname_list[j] != seqname_list[i]: continue
pts_traj[j,:,0] = all_mesh[i-j-1].vertices[traj_idx]
pts_traj[j,:,1] = all_mesh[i-j].vertices [traj_idx]
col_traj[j,:,0] = cmap(float(i-j-1)/traj_len)
col_traj[j,:,1] = cmap(float(i-j)/traj_len)
pts_trajs.append(pts_traj)
col_trajs.append(col_traj)
# change color according to time
if args.vis_cam:
mesh_cam = draw_cams(all_cam, axis=False)
mesh_cam.export('%s/mesh_cam-%s.obj'%(args.testdir,seqname))
# read images
input_size = all_anno[0][0].shape[:2]
#output_size = input_size
output_size = (int(input_size[0] * 480/input_size[1]), 480)# 270x480
frames=[]
ctrajs=[]
rndsils=[]
cd_ave=[] # average chamfer distance
f001=[] # f@1%
f002=[]
f005=[]
if args.append_img=="yes":
if args.append_render=='yes':
if args.freeze: napp_fr = 30
else: napp_fr = int(len(all_anno)//5)
for i in range(napp_fr):
frames.append(cv2.resize(all_anno[0][0],output_size[::-1])[:,:,::-1])
else:
for i in range(len(all_anno)):
#silframe=cv2.resize((all_anno[i][1]>0).astype(float),output_size[::-1])*255
imgframe=cv2.resize(all_anno[i][0],output_size[::-1])[:,:,::-1]
#redframe=(np.asarray([1,0,0])[None,None] * silframe[:,:,None]).astype(np.uint8)
#imgframe = cv2.addWeighted(imgframe, 1, redframe, 0.5, 0)
frames.append(imgframe)
#frames.append(cv2.resize(all_anno[i][1],output_size[::-1])*255) # silhouette
#frames.append(cv2.resize(all_anno[i][0],output_size[::-1])[:,:,::-1]) # frame
#strx = sorted(glob.glob('%s/*'%datapath))[i]# kp
#strx = strx.replace('JPEGImages', 'KP')
#kpimg = cv2.imread('%s/%s'%(strx.rsplit('/',1)[0],strx.rsplit('/',1)[1].replace('.jpg', '_rendered.png')))
#frames.append(cv2.resize(kpimg,output_size[::-1])[:,:,::-1])
#strx = sorted(glob.glob('%s/*'%datapath))[init_frame:end_frame][::dframe][i]# flow
#strx = strx.replace('JPEGImages', 'FlowBW')
#flowimg = cv2.imread('%s/vis-%s'%(strx.rsplit('/',1)[0],strx.rsplit('/',1)[1]))
#frames.append(cv2.resize(flowimg,output_size[::-1])[:,:,::-1])
# process cameras
theta = 9*np.pi/9
#theta = 7*np.pi/9
init_light_pose = np.asarray([[1,0,0,0],[0,np.cos(theta),-np.sin(theta),0],[0,np.sin(theta),np.cos(theta),0],[0,0,0,1]])
init_light_pose0 =np.asarray([[1,0,0,0],[0,0,-1,0],[0,1,0,0],[0,0,0,1]])
if args.freeze or args.rest:
size = len(all_mesh)
#size = 150
else:
size = len(all_mesh)
for i in range(size):
if args.append_render=='no':break
# render flow between mesh 1 and 2
if args.freeze or args.rest:
print(i)
refimg, refsil, refkp, refvis, refname = all_anno[0]
img_size = max(refimg.shape)
if args.freeze: refmesh = all_mesh[0]
elif args.rest: refmesh = mesh_rest
#refmesh.vertices -= refmesh.vertices.mean(0)[None]
#refmesh.vertices /= 1.2*np.abs(refmesh.vertices).max()
refcam = all_cam[0].copy()
rot_turntb = cv2.Rodrigues(np.asarray([0.,i*2*np.pi/size,0.]))[0]
refcam[:3,:3] = rot_turntb.dot( refcam[:3,:3] )
refcam[:2,3] = 0 # trans xy
if args.vis_cam:
refcam[2,3] = 10 # depth
refcam[3,:2] = 8*img_size/2 # fl
refcam[3,2] = refimg.shape[1]/2 # px py
refcam[3,3] = refimg.shape[0]/2 # px py
else:
refimg, refsil, refkp, refvis, refname = all_anno[i]
print('%s'%(refname))
img_size = max(refimg.shape)
refmesh = all_mesh[i]
refcam = all_cam[i]
# load vertices
refface = torch.Tensor(refmesh.faces[None]).cuda()
verts = torch.Tensor(refmesh.vertices[None]).cuda()
# change viewpoint
vp_tmat = refcam[:3,3]
vp_kmat = refcam[3]
if args.vp==-1:
# static camera
#vp_rmat = (refcam[:3,:3].T).dot(all_cam[0][:3,:3])
vp_rmat = all_cam[0][:3,:3].dot(refcam[:3,:3].T)
# vp_rmat = cv2.Rodrigues(np.asarray([np.pi/2,0,0]))[0].dot(vp_rmat) # bev
vp_tmat = all_cam[0][:3,3]
vp_kmat = all_cam[0][3].copy()
vp_kmat[2] = vp_kmat[2]/all_anno[0][0].shape[1]*all_anno[i][0].shape[1]
vp_kmat[3] = vp_kmat[3]/all_anno[0][0].shape[0]*all_anno[i][0].shape[0]
elif args.vp==-2:
# canonical camera
can_vis_rot = cv2.Rodrigues(np.asarray([0,np.pi/3,0]))[0].dot(\
cv2.Rodrigues(np.asarray([np.pi, 0,0 ]))[0])
vp_rmat = can_vis_rot.dot(refcam[:3,:3].T)
vp_tmat = np.zeros(3)
vp_tmat[2] = all_cam[0][2,3]
vp_kmat = all_cam[0][3].copy()
vp_kmat[2] = vp_kmat[2]/all_anno[0][0].shape[1]*all_anno[i][0].shape[1]
vp_kmat[3] = vp_kmat[3]/all_anno[0][0].shape[0]*all_anno[i][0].shape[0]
elif args.vp==1:
vp_rmat = cv2.Rodrigues(np.asarray([0,np.pi/2,0]))[0]
elif args.vp==2:
vp_rmat = cv2.Rodrigues(np.asarray([np.pi/2,0,0]))[0]
else:
vp_rmat = cv2.Rodrigues(np.asarray([0.,0,0]))[0]
refcam_vp = refcam.copy()
#refcam_vp[:3,:3] = refcam_vp[:3,:3].dot(vp_rmat)
refcam_vp[:3,:3] = vp_rmat.dot(refcam_vp[:3,:3])
if args.vp==1 or args.vp==2:
vmean = verts[0].mean(0).cpu()
vp_tmat[:2] = (-refcam_vp[:3,:3].dot(vmean))[:2]
refcam_vp[:3,3] = vp_tmat
refcam_vp[3] = vp_kmat
# render
Rmat = torch.Tensor(refcam_vp[None,:3,:3]).cuda()
Tmat = torch.Tensor(refcam_vp[None,:3,3]).cuda()
ppoint =refcam_vp[3,2:]
focal = refcam_vp[3,:2]
verts = obj_to_cam(verts, Rmat, Tmat)
r = OffscreenRenderer(img_size, img_size)
colors = refmesh.visual.vertex_colors
scene = Scene(ambient_light=0.4*np.asarray([1.,1.,1.,1.]))
direc_l = pyrender.DirectionalLight(color=np.ones(3), intensity=6.0)
colors= np.concatenate([0.6*colors[:,:3].astype(np.uint8), colors[:,3:]],-1) # avoid overexposure
# project trajectories to image
if args.vis_traj:
pts_trajs[i] = obj2cam_np(pts_trajs[i], Rmat, Tmat)
if args.vis_cam:
mesh_cam_transformed = mesh_cam.copy()
mesh_cam_transformed.vertices = obj2cam_np(mesh_cam_transformed.vertices, Rmat, Tmat)
# compute error if ground-truth is given
if len(args.gtdir)>0:
if len(gt_meshes)>0:
verts_gt = torch.Tensor(gt_meshes[i].vertices[None]).cuda()
refface_gt=torch.Tensor(gt_meshes[i].faces[None]).cuda()
else:
verts_gt = verts
refface_gt = refface
# ama camera coord -> scale -> our camera coord
if args.gt_pmat!='canonical':
pmat = np.loadtxt(args.gt_pmat)
K,R,T,_,_,_,_=cv2.decomposeProjectionMatrix(pmat)
Rmat_gt = R
Tmat_gt = T[:3,0]/T[-1,0]
Tmat_gt = Rmat_gt.dot(-Tmat_gt[...,None])[...,0]
K = K/K[-1,-1]
ppoint[0] = K[0,2]
ppoint[1] = K[1,2]
focal[0] = K[0,0]
focal[1] = K[1,1]
else:
Rmat_gt = np.eye(3)
Tmat_gt = np.asarray([0,0,0]) # assuming synthetic obj has depth 3
# render ground-truth to different viewpoint according to cam prediction
#Rmat_gt = refcam[:3,:3].T
#Tmat_gt = -refcam[:3,:3].T.dot(refcam[:3,3:4])[...,0]
#Rmat_gt = refcam_vp[:3,:3].dot(Rmat_gt)
#Tmat_gt = refcam_vp[:3,:3].dot(Tmat_gt[...,None])[...,0] + refcam_vp[:3,3]
# transform gt to camera
Rmat_gt = torch.Tensor(Rmat_gt).cuda()[None]
Tmat_gt = torch.Tensor(Tmat_gt).cuda()[None]
# max length of axis aligned bbox
bbox_max = float((verts_gt.max(1)[0]-verts_gt.min(1)[0]).max().cpu())
verts_gt = obj_to_cam(verts_gt, Rmat_gt, Tmat_gt)
import chamfer3D.dist_chamfer_3D
import fscore
chamLoss = chamfer3D.dist_chamfer_3D.chamfer_3DDist()
            ## use ICP to improve our results
fitted_scale = verts_gt[...,-1].median() / verts[...,-1].median()
verts = verts*fitted_scale
frts = pytorch3d.ops.iterative_closest_point(verts,verts_gt, \
estimate_scale=False,max_iterations=100)
verts = ((frts.RTs.s*verts).matmul(frts.RTs.R)+frts.RTs.T[:,None])
## show registered meshes
#t=trimesh.Trimesh(verts[0].cpu()).export('tmp/0.obj')
#t=trimesh.Trimesh(verts_gt[0].cpu()).export('tmp/1.obj')
#pdb.set_trace()
raw_cd,raw_cd_back,_,_ = chamLoss(verts_gt,verts) # this returns distance squared
f1,_,_ = fscore.fscore(raw_cd, raw_cd_back,
threshold = (bbox_max*0.01)**2)
f2,_,_ = fscore.fscore(raw_cd, raw_cd_back,
threshold = (bbox_max*0.02)**2)
f5,_,_ = fscore.fscore(raw_cd, raw_cd_back,
threshold = (bbox_max*0.05)**2)
# sum
raw_cd = np.asarray(raw_cd.cpu()[0])
raw_cd_back = np.asarray(raw_cd_back.cpu()[0])
raw_cd = np.sqrt(raw_cd)
raw_cd_back = np.sqrt(raw_cd_back)
cd_mean = raw_cd.mean() + raw_cd_back.mean()
cd_ave.append(cd_mean)
f001.append( f1.cpu().numpy())
f002.append( f2.cpu().numpy())
f005.append( f5.cpu().numpy())
print('cd:%.2f cm'%(100*cd_mean))
cm = plt.get_cmap('plasma')
if args.vis_gtmesh:
verts = verts_gt
refface = refface_gt
colors = cm(raw_cd*5)
else:
colors = cm(raw_cd_back*5)
smooth=args.smooth
if args.freeze:
tbone = 0
else:
tbone = i
if args.vis_bones:
mesh = trimesh.Trimesh(vertices=np.asarray(verts[0,:num_original_verts[tbone],:3].cpu()), faces=np.asarray(refface[0,:num_original_faces[tbone]].cpu()),vertex_colors=colors)
meshr = Mesh.from_trimesh(mesh,smooth=smooth)
meshr._primitives[0].material.RoughnessFactor=.5
scene.add_node( Node(mesh=meshr ))
mesh2 = trimesh.Trimesh(vertices=np.asarray(verts[0,num_original_verts[tbone]:,:3].cpu()), faces=np.asarray(refface[0,num_original_faces[tbone]:].cpu()-num_original_verts[tbone]),vertex_colors=colors[num_original_verts[tbone]:])
if len(mesh2.vertices)>0:
mesh2=Mesh.from_trimesh(mesh2,smooth=smooth)
mesh2._primitives[0].material.RoughnessFactor=.5
scene.add_node( Node(mesh=mesh2))
else:
mesh = trimesh.Trimesh(vertices=np.asarray(verts[0,:,:3].cpu()), faces=np.asarray(refface[0].cpu()),vertex_colors=colors)
meshr = Mesh.from_trimesh(mesh,smooth=smooth)
meshr._primitives[0].material.RoughnessFactor=.5
scene.add_node( Node(mesh=meshr ))
if args.vis_traj:
pts = pts_trajs[i].reshape(-1,3)# np.asarray([[-1,-1,1],[1,1,1]]) # 2TxNx3
colors = col_trajs[i].reshape(-1,4)#np.random.uniform(size=pts.shape)
m = Mesh([pyrender.Primitive(pts,mode=1,color_0=colors)])
scene.add_node( Node(mesh=m))
if args.vis_cam:
mesh_cam_transformed=Mesh.from_trimesh(mesh_cam_transformed)
mesh_cam_transformed._primitives[0].material.RoughnessFactor=1.
scene.add_node( Node(mesh=mesh_cam_transformed))
floor_mesh = trimesh.load('./mesh_material/wood.obj',process=False)
floor_mesh.vertices = np.concatenate([floor_mesh.vertices[:,:1], floor_mesh.vertices[:,2:3], floor_mesh.vertices[:,1:2]],-1 )
xfloor = 10*mesh.vertices[:,0].min() + (10*mesh.vertices[:,0].max()-10*mesh.vertices[:,0].min())*(floor_mesh.vertices[:,0:1] - floor_mesh.vertices[:,0].min())/(floor_mesh.vertices[:,0].max()-floor_mesh.vertices[:,0].min())
yfloor = floor_mesh.vertices[:,1:2]; yfloor[:] = (mesh.vertices[:,1].max())
zfloor = 0.5*mesh.vertices[:,2].min() + (10*mesh.vertices[:,2].max()-0.5*mesh.vertices[:,2].min())*(floor_mesh.vertices[:,2:3] - floor_mesh.vertices[:,2].min())/(floor_mesh.vertices[:,2].max()-floor_mesh.vertices[:,2].min())
floor_mesh.vertices = np.concatenate([xfloor,yfloor,zfloor],-1)
floor_mesh = trimesh.Trimesh(floor_mesh.vertices, floor_mesh.faces, vertex_colors=255*np.ones((4,4), dtype=np.uint8))
if args.floor:
scene.add_node( Node(mesh=Mesh.from_trimesh(floor_mesh))) # overrides the prev. one
if args.cam_type=='perspective':
cam = IntrinsicsCamera(
focal[0],
focal[0],
ppoint[0],
ppoint[1],
znear=1e-3,zfar=1000)
else:
cam = pyrender.OrthographicCamera(xmag=1., ymag=1.)
        cam_pose = -np.eye(4); cam_pose[0,0]=1; cam_pose[-1,-1]=1 # diag(1,-1,-1,1): flips y/z to go from the CV to the OpenGL camera convention
cam_node = scene.add(cam, pose=cam_pose)
light_pose = init_light_pose
direc_l_node = scene.add(direc_l, pose=light_pose)
#if args.vis_bones:
# color, depth = r.render(scene,flags=pyrender.RenderFlags.SHADOWS_DIRECTIONAL)
#else:
# color, depth = r.render(scene,flags=pyrender.RenderFlags.SHADOWS_DIRECTIONAL | pyrender.RenderFlags.SKIP_CULL_FACES)
color, depth = r.render(scene,flags=pyrender.RenderFlags.SHADOWS_DIRECTIONAL | pyrender.RenderFlags.SKIP_CULL_FACES)
r.delete()
color = color[:refimg.shape[0],:refimg.shape[1],:3]
rndsil = (depth[:refimg.shape[0],:refimg.shape[1]]>0).astype(int)*100
if args.overlay=='yes':
color = cv2.addWeighted(color, 0.5, refimg[:,:,::-1], 0.5, 0)
prefix = (args.outpath).split('/')[-1].split('.')[0]
color = color.copy(); color[0,0,:] = 0
imoutpath = '%s/%s-mrender%03d.jpg'%(args.testdir, prefix,i)
cv2.imwrite(imoutpath,color[:,:,::-1] )
color = cv2.resize(color, output_size[::-1])
frames.append(color)
# TODO save cams
cam_scale = output_size[1] / rndsil.shape[1]
ctraj = torch.cat([Rmat, Tmat[...,None]],-1).cpu().numpy() # 1,3,4
kmat = np.asarray([focal[0]*cam_scale,
focal[0]*cam_scale,
ppoint[0]*cam_scale,
ppoint[1]*cam_scale])
ctraj = np.concatenate([ctraj,kmat[None,None,:]],1) # 1,4,4
ctrajs.append(ctraj[0])
rndsil = cv2.resize(rndsil.astype(np.int16), output_size[::-1])
rndsils.append(rndsil)
if args.gtdir != '':
cd_ave = np.asarray(cd_ave)
print('ave chamfer dis: %.1f cm'%(100*cd_ave.mean()))
print('max chamfer dis: %.1f cm'%(100*np.max(cd_ave)))
f001 = np.asarray(f001)
print('ave f-score at d=1%%: %.1f%%'%(100*np.mean(f001)))
print('min f-score at d=1%%: %.1f%%'%(100*np.min( f001)))
f002 = np.asarray(f002)
print('ave f-score at d=2%%: %.1f%%'%(100*np.mean(f002)))
print('min f-score at d=2%%: %.1f%%'%(100*np.min( f002)))
f005 = np.asarray(f005)
print('ave f-score at d=5%%: %.1f%%'%(100*np.mean(f005)))
print('min f-score at d=5%%: %.1f%%'%(100*np.min( f005)))
save_vid(args.outpath, frames, suffix='.gif')
save_vid(args.outpath, frames, suffix='.mp4',upsample_frame=0)
# save camera trajectory and reference sil
for idx in range(len(ctrajs)):
save_path = '%s-ctrajs-%05d.txt'%(args.outpath, idx)
np.savetxt(save_path, ctrajs[idx])
save_path = '%s-refsil-%05d.png'%(args.outpath, idx)
cv2.imwrite(save_path, rndsils[idx])
if __name__ == '__main__':
main()
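# Illustrative note (convention inferred from the code above): every 4x4 `rtk`
# camera used by these scripts stacks extrinsics on top of intrinsics,
#   rtk[:3,:3] = R (world-to-camera rotation)
#   rtk[:3,3]  = T (world-to-camera translation)
#   rtk[3,:]   = [fx, fy, px, py] (pinhole intrinsics in pixels)
# e.g. refcam[3,:2] above holds the focal lengths and refcam[3,2:] the
# principal point.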
|
banmo-main
|
scripts/visualize/render_vis.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(sys.path[0])))
os.environ["PYOPENGL_PLATFORM"] = "egl" #opengl seems to only work with TPU
curr_dir = os.path.abspath(os.getcwd())
sys.path.insert(0,curr_dir)
import pdb
import glob
import numpy as np
import configparser
from utils.io import config_to_dataloader, draw_cams, render_root_txt
cam_dir=sys.argv[1]
cap_frame=int(sys.argv[2])
def main():
render_root_txt(cam_dir, cap_frame)
# usage: python scripts/visualize/render_root_txt.py <path-to-camera-folder> <cap_frame>
# draws a trajectory of camera locations
if __name__ == '__main__':
main()
|
banmo-main
|
scripts/visualize/render_root_txt.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import sys, os
import pdb
sys.path.append(os.path.dirname(os.path.dirname(sys.path[0])))
os.environ["PYOPENGL_PLATFORM"] = "egl" #opengl seems to only work with TPU
curr_dir = os.path.abspath(os.getcwd())
sys.path.insert(0,curr_dir)
import subprocess
import imageio
import glob
from utils.io import save_vid
import matplotlib.pyplot as plt
import numpy as np
import torch
import cv2
import argparse
import trimesh
from nnutils.geom_utils import obj_to_cam, pinhole_cam, obj2cam_np
import pyrender
from pyrender import IntrinsicsCamera,Mesh, Node, Scene,OffscreenRenderer
import configparser
import matplotlib
cmap = matplotlib.cm.get_cmap('cool')
from utils.io import config_to_dataloader, draw_cams
parser = argparse.ArgumentParser(description='script to render cameras over epochs')
parser.add_argument('--testdir', default='',
help='path to test dir')
parser.add_argument('--cap_frame', default=-1,type=int,
help='number of frames to cap')
parser.add_argument('--first_idx', default=0,type=int,
help='first frame index to vis')
parser.add_argument('--last_idx', default=-1,type=int,
help='last frame index to vis')
parser.add_argument('--mesh_only', dest='mesh_only',action='store_true',
help='whether to only render rest mesh')
args = parser.parse_args()
img_size = 1024
def main():
# read all the data
logname = args.testdir.split('/')[-2]
varlist = [i for i in glob.glob('%s/vars_*.npy'%args.testdir) \
if 'latest.npy' not in i]
varlist = sorted(varlist,
key=lambda x:int(x.split('/')[-1].split('vars_')[-1].split('.npy')[0]))
# get first index that is used for optimization
var = np.load(varlist[-1],allow_pickle=True)[()]
var['rtk'] = var['rtk'][args.first_idx:args.last_idx]
first_valid_idx = np.linalg.norm(var['rtk'][:,:3,3], 2,-1)>0
first_valid_idx = np.argmax(first_valid_idx)
#varlist = varlist[1:]
if args.cap_frame>-1:
varlist = varlist[:args.cap_frame]
size = len(varlist)
mesh_cams = []
mesh_objs = []
for var_path in varlist:
# construct camera mesh
var = np.load(var_path,allow_pickle=True)[()]
var['rtk'] = var['rtk'][args.first_idx:args.last_idx]
mesh_cams.append(draw_cams(var['rtk'][first_valid_idx:]))
mesh_objs.append(var['mesh_rest'])
frames = []
# process cameras
for i in range(size):
print(i)
refcam = var['rtk'][first_valid_idx].copy()
## median camera trans
#mtrans = np.median(np.linalg.norm(var['rtk'][first_valid_idx:,:3,3],2,-1))
# max camera trans
mtrans = np.max(np.linalg.norm(var['rtk'][first_valid_idx:,:3,3],2,-1))
refcam[:2,3] = 0 # trans xy
refcam[2,3] = 4*mtrans # depth
refcam[3,:2] = 4*img_size/2 # fl
refcam[3,2] = img_size/2
refcam[3,3] = img_size/2
vp_rmat = refcam[:3,:3]
if args.mesh_only: refcam[3,:2] *= 2 # make it appear larger
else:
vp_rmat = cv2.Rodrigues(np.asarray([np.pi/2,0,0]))[0].dot(vp_rmat) # bev
refcam[:3,:3] = vp_rmat
# load vertices
refmesh = mesh_cams[i]
refface = torch.Tensor(refmesh.faces[None]).cuda()
verts = torch.Tensor(refmesh.vertices[None]).cuda()
# render
Rmat = torch.Tensor(refcam[None,:3,:3]).cuda()
Tmat = torch.Tensor(refcam[None,:3,3]).cuda()
ppoint =refcam[3,2:]
focal = refcam[3,:2]
verts = obj_to_cam(verts, Rmat, Tmat)
r = OffscreenRenderer(img_size, img_size)
colors = refmesh.visual.vertex_colors
scene = Scene(ambient_light=0.4*np.asarray([1.,1.,1.,1.]))
direc_l = pyrender.DirectionalLight(color=np.ones(3), intensity=6.0)
colors= np.concatenate([0.6*colors[:,:3].astype(np.uint8), colors[:,3:]],-1) # avoid overexposure
smooth=True
mesh = trimesh.Trimesh(vertices=np.asarray(verts[0,:,:3].cpu()), faces=np.asarray(refface[0].cpu()),vertex_colors=colors)
meshr = Mesh.from_trimesh(mesh,smooth=smooth)
meshr._primitives[0].material.RoughnessFactor=.5
if not args.mesh_only:
scene.add_node( Node(mesh=meshr ))
mesh_obj = mesh_objs[i]
if args.mesh_only:
# assign gray color
mesh_obj.visual.vertex_colors[...,:3] = 64
if len(mesh_obj.vertices)>0:
mesh_obj.vertices = obj2cam_np(mesh_obj.vertices, Rmat, Tmat)
mesh_obj=Mesh.from_trimesh(mesh_obj,smooth=smooth)
mesh_obj._primitives[0].material.RoughnessFactor=1.
scene.add_node( Node(mesh=mesh_obj))
cam = IntrinsicsCamera(
focal[0],
focal[0],
ppoint[0],
ppoint[1],
znear=1e-3,zfar=1000)
        cam_pose = -np.eye(4); cam_pose[0,0]=1; cam_pose[-1,-1]=1 # diag(1,-1,-1,1): flips y/z to go from the CV to the OpenGL camera convention
cam_node = scene.add(cam, pose=cam_pose)
light_pose =np.asarray([[1,0,0,0],[0,0,-1,0],[0,1,0,0],[0,0,0,1]],dtype=float)
light_pose[:3,:3] = cv2.Rodrigues(np.asarray([np.pi,0,0]))[0]
direc_l_node = scene.add(direc_l, pose=light_pose)
color, depth = r.render(scene,flags=pyrender.RenderFlags.SHADOWS_DIRECTIONAL | pyrender.RenderFlags.SKIP_CULL_FACES)
r.delete()
# save image
color = color.astype(np.uint8)
color = cv2.putText(color, 'epoch: %02d'%(i), (30,50),
cv2.FONT_HERSHEY_SIMPLEX,2, (256,0,0), 2)
imoutpath = '%s/mesh-cam-%02d.png'%(args.testdir,i)
cv2.imwrite(imoutpath,color[:,:,::-1] )
frames.append(color)
save_vid('%s/mesh-cam'%args.testdir, frames, suffix='.gif')
save_vid('%s/mesh-cam'%args.testdir, frames, suffix='.mp4',upsample_frame=-1)
if __name__ == '__main__':
main()
|
banmo-main
|
scripts/visualize/render_root.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
"""
bash scripts/render_nvs.sh
"""
from absl import flags, app
import sys
sys.path.insert(0,'')
sys.path.insert(0,'third_party')
import numpy as np
import torch
import os
import glob
import pdb
import cv2
import trimesh
from scipy.spatial.transform import Rotation as R
import imageio
from collections import defaultdict
from utils.io import save_vid, str_to_frame, save_bones, load_root, load_sils
from utils.colors import label_colormap
from nnutils.train_utils import v2s_trainer
from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, \
raycast, sample_xy, K2inv, get_near_far, \
chunk_rays
from nnutils.rendering import render_rays
from ext_utils.util_flow import write_pfm
from ext_utils.flowlib import cat_imgflo
opts = flags.FLAGS
# script specific ones
flags.DEFINE_integer('maxframe', 0, 'maximum number of frames to render')
flags.DEFINE_integer('vidid', 0, 'video id that determines the env code')
flags.DEFINE_integer('bullet_time', -1, 'frame id in a video to show bullet time')
flags.DEFINE_float('scale', 0.1,
'scale applied to the rendered image (wrt focal length)')
flags.DEFINE_string('rootdir', 'tmp/traj/','root body directory')
flags.DEFINE_string('nvs_outpath', 'tmp/nvs-','output prefix')
def construct_rays_nvs(img_size, rtks, near_far, rndmask, device):
"""
rndmask: controls which pixel to render
"""
bs = rtks.shape[0]
rtks = torch.Tensor(rtks).to(device)
rndmask = torch.Tensor(rndmask).to(device).view(-1)>0
_, xys = sample_xy(img_size, bs, 0, device, return_all=True)
xys=xys[:,rndmask]
Rmat = rtks[:,:3,:3]
Tmat = rtks[:,:3,3]
Kinv = K2inv(rtks[:,3])
rays = raycast(xys, Rmat, Tmat, Kinv, near_far)
return rays
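# Minimal usage sketch of the helper above (illustrative; `rtks` follows the
# 4x4 [R|T / intrinsics] convention and `rndmask` selects which pixels get a
# ray):
#   rtks = np.eye(4)[None]; rtks[0,3] = [128., 128., 64., 64.]
#   near_far = torch.Tensor([[0.1, 10.]])
#   rndmask = np.ones((128, 128))  # render every pixel
#   rays = construct_rays_nvs(128, rtks, near_far, rndmask, 'cpu')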
def main(_):
trainer = v2s_trainer(opts, is_eval=True)
data_info = trainer.init_dataset()
trainer.define_model(data_info)
model = trainer.model
model.eval()
nerf_models = model.nerf_models
embeddings = model.embeddings
# bs, 4,4 (R|T)
# (f|p)
rtks = load_root(opts.rootdir, 0) # cap frame=0=>load all
rndsils = load_sils(opts.rootdir.replace('ctrajs', 'refsil'),0)
if opts.maxframe>0:
sample_idx = np.linspace(0,len(rtks)-1,opts.maxframe).astype(int)
rtks = rtks[sample_idx]
rndsils = rndsils[sample_idx]
else:
sample_idx = np.linspace(0,len(rtks)-1, len(rtks)).astype(int)
img_size = rndsils[0].shape
if img_size[0] > img_size[1]:
img_type='vert'
else:
img_type='hori'
# determine render image scale
rtks[:,3] = rtks[:,3]*opts.scale
bs = len(rtks)
img_size = int(max(img_size)*opts.scale)
print("render size: %d"%img_size)
model.img_size = img_size
opts.render_size = img_size
vars_np = {}
vars_np['rtk'] = rtks
vars_np['idk'] = np.ones(bs)
near_far = torch.zeros(bs,2).to(model.device)
near_far = get_near_far(near_far,
vars_np,
pts=model.latest_vars['mesh_rest'].vertices)
vidid = torch.Tensor([opts.vidid]).to(model.device).long()
source_l = model.data_offset[opts.vidid+1] - model.data_offset[opts.vidid] -1
embedid = torch.Tensor(sample_idx).to(model.device).long() + \
model.data_offset[opts.vidid]
if opts.bullet_time>-1: embedid[:] = opts.bullet_time+model.data_offset[opts.vidid]
print(embedid)
rgbs = []
sils = []
viss = []
for i in range(bs):
rndsil = rndsils[i]
rndmask = np.zeros((img_size, img_size))
if img_type=='vert':
size_short_edge = int(rndsil.shape[1] * img_size/rndsil.shape[0])
rndsil = cv2.resize(rndsil, (size_short_edge, img_size))
rndmask[:,:size_short_edge] = rndsil
else:
size_short_edge = int(rndsil.shape[0] * img_size/rndsil.shape[1])
rndsil = cv2.resize(rndsil, (img_size, size_short_edge))
rndmask[:size_short_edge] = rndsil
rays = construct_rays_nvs(model.img_size, rtks[i:i+1],
near_far[i:i+1], rndmask, model.device)
# add env code
rays['env_code'] = model.env_code(embedid[i:i+1])[:,None]
rays['env_code'] = rays['env_code'].repeat(1,rays['nsample'],1)
# add bones
time_embedded = model.pose_code(embedid[i:i+1])[:,None]
rays['time_embedded'] = time_embedded.repeat(1,rays['nsample'],1)
if opts.lbs and model.num_bone_used>0:
bone_rts = model.nerf_body_rts(embedid[i:i+1])
rays['bone_rts'] = bone_rts.repeat(1,rays['nsample'],1)
model.update_delta_rts(rays)
with torch.no_grad():
# render images only
results=defaultdict(list)
bs_rays = rays['bs'] * rays['nsample'] #
for j in range(0, bs_rays, opts.chunk):
rays_chunk = chunk_rays(rays,j,opts.chunk)
rendered_chunks = render_rays(nerf_models,
embeddings,
rays_chunk,
N_samples = opts.ndepth,
perturb=0,
noise_std=0,
chunk=opts.chunk, # chunk size is effective in val mode
use_fine=True,
img_size=model.img_size,
obj_bound = model.latest_vars['obj_bound'],
render_vis=True,
opts=opts,
)
for k, v in rendered_chunks.items():
results[k] += [v]
for k, v in results.items():
v = torch.cat(v, 0)
v = v.view(rays['nsample'], -1)
results[k] = v
rgb = results['img_coarse'].cpu().numpy()
dph = results['depth_rnd'] [...,0].cpu().numpy()
sil = results['sil_coarse'][...,0].cpu().numpy()
vis = results['vis_pred'] [...,0].cpu().numpy()
sil[sil<0.5] = 0
rgb[sil<0.5] = 1
rgbtmp = np.ones((img_size, img_size, 3))
dphtmp = np.ones((img_size, img_size))
siltmp = np.ones((img_size, img_size))
vistmp = np.ones((img_size, img_size))
rgbtmp[rndmask>0] = rgb
dphtmp[rndmask>0] = dph
siltmp[rndmask>0] = sil
vistmp[rndmask>0] = vis
if img_type=='vert':
rgb = rgbtmp[:,:size_short_edge]
sil = siltmp[:,:size_short_edge]
vis = vistmp[:,:size_short_edge]
dph = dphtmp[:,:size_short_edge]
else:
rgb = rgbtmp[:size_short_edge]
sil = siltmp[:size_short_edge]
vis = vistmp[:size_short_edge]
dph = dphtmp[:size_short_edge]
rgbs.append(rgb)
sils.append(sil*255)
viss.append(vis*255)
cv2.imwrite('%s-rgb_%05d.png'%(opts.nvs_outpath,i), rgb[...,::-1]*255)
cv2.imwrite('%s-sil_%05d.png'%(opts.nvs_outpath,i), sil*255)
cv2.imwrite('%s-vis_%05d.png'%(opts.nvs_outpath,i), vis*255)
save_vid('%s-rgb'%(opts.nvs_outpath), rgbs, suffix='.mp4',upsample_frame=0)
save_vid('%s-sil'%(opts.nvs_outpath), sils, suffix='.mp4',upsample_frame=0)
save_vid('%s-vis'%(opts.nvs_outpath), viss, suffix='.mp4',upsample_frame=0)
if __name__ == '__main__':
app.run(main)
|
banmo-main
|
scripts/visualize/nvs.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
"""
bash scripts/render_nvs.sh
"""
from absl import flags, app
import sys
sys.path.insert(0,'')
sys.path.insert(0,'third_party')
import numpy as np
import torch
import os
import glob
import pdb
import cv2
import trimesh
from scipy.spatial.transform import Rotation as R
import imageio
from collections import defaultdict
import matplotlib.cm
cmap = matplotlib.cm.get_cmap('plasma')
from utils.io import save_vid, str_to_frame, save_bones, load_root, load_sils
from utils.colors import label_colormap
from nnutils.train_utils import v2s_trainer
from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, \
raycast, sample_xy, K2inv, get_near_far, \
chunk_rays
from nnutils.rendering import render_rays
from ext_utils.util_flow import write_pfm
from ext_utils.flowlib import cat_imgflo
opts = flags.FLAGS
# script specific ones
flags.DEFINE_integer('maxframe', 1, 'maximum number of frames to render')
flags.DEFINE_integer('vidid', 0, 'video id that determines the env code')
flags.DEFINE_integer('bullet_time', -1, 'frame id in a video to show bullet time')
flags.DEFINE_float('scale', 0.1,
'scale applied to the rendered image (wrt focal length)')
flags.DEFINE_string('rootdir', 'tmp/traj/','root body directory')
flags.DEFINE_string('nvs_outpath', 'tmp/nvs-','output prefix')
def construct_rays_nvs(img_size, rtks, near_far, rndmask, device):
"""
rndmask: controls which pixel to render
"""
bs = rtks.shape[0]
rtks = torch.Tensor(rtks).to(device)
rndmask = torch.Tensor(rndmask).to(device).view(-1)>0
_, xys = sample_xy(img_size, bs, 0, device, return_all=True)
xys=xys[:,rndmask]
Rmat = rtks[:,:3,:3]
Tmat = rtks[:,:3,3]
Kinv = K2inv(rtks[:,3])
rays = raycast(xys, Rmat, Tmat, Kinv, near_far)
return rays
def main(_):
trainer = v2s_trainer(opts, is_eval=True)
data_info = trainer.init_dataset()
trainer.define_model(data_info)
model = trainer.model
model.eval()
nerf_models = model.nerf_models
embeddings = model.embeddings
# bs, 4,4 (R|T)
# (f|p)
nframe=120
img_size = int(512 * opts.scale)
fl = img_size
pp = img_size/2
rtks = np.zeros((nframe,4,4))
rot1 = cv2.Rodrigues(np.asarray([0,np.pi/2,0]))[0]
rot2 = cv2.Rodrigues(np.asarray([np.pi,0,0]))[0]
rtks[:,:3,:3] = np.dot(rot1, rot2)[None]
rtks[:,2,3] = 0.2
rtks[:,3] = np.asarray([fl,fl,pp,pp])[None]
sample_idx = np.asarray(range(nframe)).astype(int)
# determine render image scale
bs = len(rtks)
print("render size: %d"%img_size)
model.img_size = img_size
opts.render_size = img_size
vars_np = {}
vars_np['rtk'] = rtks
vars_np['idk'] = np.ones(bs)
near_far = torch.zeros(bs,2).to(model.device)
near_far = get_near_far(near_far,
vars_np,
pts=model.latest_vars['mesh_rest'].vertices)
depth_near = near_far[0,0].cpu().numpy()
depth_far = near_far[0,1].cpu().numpy()
vidid = torch.Tensor([opts.vidid]).to(model.device).long()
source_l = model.data_offset[opts.vidid+1] - model.data_offset[opts.vidid] -1
embedid = torch.Tensor(sample_idx).to(model.device).long() + \
model.data_offset[opts.vidid]
print(embedid)
rgbs = []
sils = []
dphs = []
viss = []
for i in range(bs):
model_path = '%s/%s'% (opts.model_path.rsplit('/',1)[0], 'params_%d.pth'%(i))
        trainer.load_network(model_path, is_eval=True) # load the checkpoint saved at iteration i to visualize training progress
rndmask = np.ones((img_size, img_size))>0
rays = construct_rays_nvs(model.img_size, rtks[i:i+1],
near_far[i:i+1], rndmask, model.device)
# add env code
rays['env_code'] = model.env_code(embedid[i:i+1])[:,None]
rays['env_code'] = rays['env_code'].repeat(1,rays['nsample'],1)
## add bones
#time_embedded = model.pose_code(embedid[i:i+1])[:,None]
#rays['time_embedded'] = time_embedded.repeat(1,rays['nsample'],1)
#if opts.lbs and model.num_bone_used>0:
# bone_rts = model.nerf_body_rts(embedid[i:i+1])
# rays['bone_rts'] = bone_rts.repeat(1,rays['nsample'],1)
# model.update_delta_rts(rays)
with torch.no_grad():
# render images only
results=defaultdict(list)
bs_rays = rays['bs'] * rays['nsample'] #
for j in range(0, bs_rays, opts.chunk):
rays_chunk = chunk_rays(rays,j,opts.chunk)
rendered_chunks = render_rays(nerf_models,
embeddings,
rays_chunk,
N_samples = opts.ndepth,
perturb=0,
noise_std=0,
chunk=opts.chunk, # chunk size is effective in val mode
use_fine=True,
img_size=model.img_size,
obj_bound = model.latest_vars['obj_bound'],
render_vis=True,
opts=opts,
)
for k, v in rendered_chunks.items():
results[k] += [v]
for k, v in results.items():
v = torch.cat(v, 0)
v = v.view(rays['nsample'], -1)
results[k] = v
rgb = results['img_coarse'].cpu().numpy()
dph = results['depth_rnd'] [...,0].cpu().numpy()
sil = results['sil_coarse'][...,0].cpu().numpy()
vis = results['vis_pred'] [...,0].cpu().numpy()
#sil[sil<0.5] = 0
#rgb[sil<0.5] = 1
rgbtmp = np.ones((img_size, img_size, 3))
dphtmp = np.ones((img_size, img_size))
siltmp = np.ones((img_size, img_size))
vistmp = np.ones((img_size, img_size))
rgbtmp[rndmask>0] = rgb
dphtmp[rndmask>0] = dph
siltmp[rndmask>0] = sil
vistmp[rndmask>0] = vis
rgb = rgbtmp
sil = siltmp
vis = vistmp
dph = dphtmp
dph = (dph - depth_near) / (depth_far - depth_near)*2
dph = np.clip(dph,0,1)
dph = cmap(dph)
rgb = rgb * sil[...,None]
dph = dph * sil[...,None]
rgbs.append(rgb)
sils.append(sil*255)
viss.append(vis*255)
dphs.append(dph*255)
cv2.imwrite('%s-rgb_%05d.png'%(opts.nvs_outpath,i), rgb[...,::-1]*255)
cv2.imwrite('%s-sil_%05d.png'%(opts.nvs_outpath,i), sil*255)
cv2.imwrite('%s-vis_%05d.png'%(opts.nvs_outpath,i), vis*255)
cv2.imwrite('%s-dph_%05d.png'%(opts.nvs_outpath,i), dph[...,::-1]*255)
save_vid('%s-rgb'%(opts.nvs_outpath), rgbs, suffix='.mp4')
save_vid('%s-sil'%(opts.nvs_outpath), sils, suffix='.mp4')
save_vid('%s-vis'%(opts.nvs_outpath), viss, suffix='.mp4')
save_vid('%s-dph'%(opts.nvs_outpath), dphs, suffix='.mp4')
if __name__ == '__main__':
app.run(main)
|
banmo-main
|
scripts/visualize/nvs_iter.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
# TODO: pass ft_cse to use fine-tuned feature
# TODO: pass fine_steps -1 to use fine samples
from absl import flags, app
import sys
sys.path.insert(0,'')
sys.path.insert(0,'third_party')
import numpy as np
from matplotlib import pyplot as plt
import matplotlib
import torch
import os
import glob
import pdb
import cv2
import trimesh
from scipy.spatial.transform import Rotation as R
import imageio
from utils.io import save_vid, str_to_frame, save_bones, draw_lines, vis_match
from utils.colors import label_colormap
from nnutils.train_utils import v2s_trainer
from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, obj_to_cam,\
Kmatinv, K2mat, K2inv, sample_xy, resample_dp,\
raycast
from nnutils.loss_utils import kp_reproj, feat_match, kp_reproj_loss
from ext_utils.util_flow import write_pfm
from ext_utils.flowlib import cat_imgflo
opts = flags.FLAGS
def construct_rays(dp_feats_rsmp, model, xys, rand_inds,
Rmat, Tmat, Kinv, near_far, flip=True):
device = dp_feats_rsmp.device
bs,nsample,_ =xys.shape
opts = model.opts
embedid=model.embedid
embedid = embedid.long().to(device)[:,None]
rays = raycast(xys, Rmat, Tmat, Kinv, near_far)
rtk_vec = rays['rtk_vec']
del rays
feats_at_samp = [dp_feats_rsmp[i].view(model.num_feat,-1).T\
[rand_inds[i].long()] for i in range(bs)]
feats_at_samp = torch.stack(feats_at_samp,0) # bs,ns,num_feat
# TODO implement for se3
if opts.lbs and model.num_bone_used>0:
bone_rts = model.nerf_body_rts(embedid)
bone_rts = bone_rts.repeat(1,nsample,1)
# TODO rearrange inputs
feats_at_samp = feats_at_samp.view(-1, model.num_feat)
xys = xys.view(-1,1,2)
if flip:
rtk_vec = rtk_vec.view(bs//2,2,-1).flip(1).view(rtk_vec.shape)
bone_rts = bone_rts.view(bs//2,2,-1).flip(1).view(bone_rts.shape)
rays = {'rtk_vec': rtk_vec,
'bone_rts': bone_rts}
return rays, feats_at_samp, xys
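# Note on the flip above (inferred from usage): batches are built as
# consecutive (source, target) pairs, so flipping each pair of rtk/bone
# vectors makes frame i's sampled features get matched against frame j's
# rays, and vice versa.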
def match_frames(trainer, idxs, nsample=200):
idxs = [int(i) for i in idxs.split(' ')]
bs = len(idxs)
opts = trainer.opts
device = trainer.device
model = trainer.model
model.eval()
# load frames and aux data
for dataset in trainer.evalloader.dataset.datasets:
dataset.load_pair = False
batch = []
for i in idxs:
batch.append( trainer.evalloader.dataset[i] )
batch = trainer.evalloader.collate_fn(batch)
model.set_input(batch)
rtk = model.rtk
Rmat = rtk[:,:3,:3]
Tmat = rtk[:,:3,3]
Kmat = K2mat(rtk[:,3,:])
kaug = model.kaug # according to cropping, p = Kaug Kmat P
Kaug = K2inv(kaug)
Kinv = Kmatinv(Kaug.matmul(Kmat))
near_far = model.near_far[model.frameid.long()]
dp_feats_rsmp = model.dp_feats
# construct rays for sampled pixels
rand_inds, xys = sample_xy(opts.img_size, bs, nsample, device,return_all=False)
rays, feats_at_samp, xys = construct_rays(dp_feats_rsmp, model, xys, rand_inds,
Rmat, Tmat, Kinv, near_far)
model.update_delta_rts(rays)
# re-project
with torch.no_grad():
pts_pred = feat_match(model.nerf_feat, model.embedding_xyz, feats_at_samp,
model.latest_vars['obj_bound'],grid_size=20,is_training=False)
pts_pred = pts_pred.view(bs,nsample,3)
xy_reproj = kp_reproj(pts_pred, model.nerf_models, model.embedding_xyz, rays)
# draw
imgs_trg = model.imgs.view(bs//2,2,-1).flip(1).view(model.imgs.shape)
xy_reproj = xy_reproj.view(bs,nsample,2)
xys = xys.view(bs,nsample, 2)
sil_at_samp = torch.stack([model.masks[i].view(-1,1)[rand_inds[i]] \
for i in range(bs)],0) # bs,ns,1
for i in range(bs):
img1 = model.imgs[i]
img2 = imgs_trg[i]
img = torch.cat([img1, img2],2)
valid_idx = sil_at_samp[i].bool()[...,0]
p1s = xys[i][valid_idx]
p2s = xy_reproj[i][valid_idx]
p2s[...,0] = p2s[...,0] + img1.shape[2]
img = draw_lines(img, p1s,p2s)
cv2.imwrite('tmp/match_%04d.png'%i, img)
# visualize matching error
if opts.render_size<=128:
with torch.no_grad():
rendered, rand_inds = model.nerf_render(rtk, kaug, model.embedid,
nsample=opts.nsample, ndepth=opts.ndepth)
xyz_camera = rendered['xyz_camera_vis'][0].reshape(opts.render_size**2,-1)
xyz_canonical = rendered['xyz_canonical_vis'][0].reshape(opts.render_size**2,-1)
skip_idx = len(xyz_camera)//50 # vis 50 rays
trimesh.Trimesh(xyz_camera[0::skip_idx].reshape(-1,3).cpu()).\
export('tmp/match_camera_pts.obj')
trimesh.Trimesh(xyz_canonical[0::skip_idx].reshape(-1,3).cpu()).\
export('tmp/match_canonical_pts.obj')
vis_match(rendered, model.masks, model.imgs,
bs,opts.img_size, opts.ndepth)
## construct rays for all pixels
#rand_inds, xys = sample_xy(opts.img_size, bs, nsample, device,return_all=True)
#rays, feats_at_samp, xys = construct_rays(dp_feats_rsmp, model, xys, rand_inds,
# Rmat, Tmat, Kinv, near_far, flip=False)
#with torch.no_grad():
# pts_pred = feat_match(model.nerf_feat, model.embedding_xyz, feats_at_samp,
# model.latest_vars['obj_bound'],grid_size=20,is_training=False)
# pts_pred = pts_pred.view(bs,opts.render_size**2,3)
# proj_err = kp_reproj_loss(pts_pred, xys, model.nerf_models,
# model.embedding_xyz, rays)
# proj_err = proj_err.view(pts_pred.shape[:-1]+(1,))
# proj_err = proj_err/opts.img_size * 2
# results = {}
# results['proj_err'] = proj_err
## visualize current error stats
#feat_err=model.latest_vars['fp_err'][:,0]
#proj_err=model.latest_vars['fp_err'][:,1]
#feat_err = feat_err[feat_err>0]
#proj_err = proj_err[proj_err>0]
#print('feat-med: %f'%(np.median(feat_err)))
#print('proj-med: %f'%(np.median(proj_err)))
#plt.hist(feat_err,bins=100)
#plt.savefig('tmp/viser_feat_err.jpg')
#plt.clf()
#plt.hist(proj_err,bins=100)
#plt.savefig('tmp/viser_proj_err.jpg')
# visualize codes
with torch.no_grad():
fid = torch.Tensor(range(0,len(model.impath))).cuda().long()
D=model.pose_code(fid)
D = D.view(len(fid),-1)
##TODO
#px = torch.Tensor(range(len(D))).cuda()
#py = px*2
#pz = px*5+1
#D = torch.stack([px,py,pz],-1)
D = D-D.mean(0)[None]
A = D.T.matmul(D)/D.shape[0] # fxf
U,S,V=torch.svd(A) #
code_proj_3d=D.matmul(V[:,:3])
cmap = matplotlib.cm.get_cmap('cool')
time = np.asarray(range(len(model.impath)))
time = time/time.max()
code_proj_3d=code_proj_3d.detach().cpu().numpy()
trimesh.Trimesh(code_proj_3d, vertex_colors=cmap(time)).export('tmp/0.obj')
#plt.figure(figsize=(16,16))
plot_stack = []
weight_dir = opts.model_path.rsplit('/',1)[0]
bne_path = sorted(glob.glob('%s/%s-*bne-mrender*.jpg'%\
(weight_dir, opts.seqname)))
img_path = model.impath.copy()
## remove the last img for each video to make shape consistent with bone renders
#for i in model.data_offset[1:][::-1]:
# img_path.remove(img_path[i-1])
# code_proj_3d = np.delete(code_proj_3d, i-1,0)
# plot the first video
img_path = img_path [:model.data_offset[1]-2]
code_proj_3d = code_proj_3d[:model.data_offset[1]-2]
try:
bne_path = bne_path [:model.data_offset[1]-2]
except:
pass
for i in range(len(code_proj_3d)):
plt.plot(code_proj_3d[i,0], code_proj_3d[i,1], color=cmap(time[i]), marker='o')
plt.annotate(str(i), (code_proj_3d[i,0], code_proj_3d[i,1]))
plt.xlim(code_proj_3d[:,0].min(), code_proj_3d[:,0].max())
plt.ylim(code_proj_3d[:,1].min(), code_proj_3d[:,1].max())
fig = plt.gcf()
fig.canvas.draw()
plot = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
plot = plot.reshape(fig.canvas.get_width_height()[::-1] + (3,))
print('plot pose code of frame id:%03d'%i)
if len(bne_path) == len(code_proj_3d):
bneimg = cv2.imread(bne_path[i])
bneimg = cv2.resize(bneimg,\
(bneimg.shape[1]*plot.shape[0]//bneimg.shape[0], plot.shape[0]))
img=cv2.imread(img_path[i])[:,:,::-1]
img = cv2.resize(img,\
(img.shape[1]*plot.shape[0]//img.shape[0], plot.shape[0]))
plot = np.hstack([img, bneimg, plot])
plot_stack.append(plot)
save_vid('tmp/code', plot_stack, suffix='.mp4',
upsample_frame=150.,fps=30)
save_vid('tmp/code', plot_stack, suffix='.gif',
upsample_frame=150.,fps=30)
# vis dps
cv2.imwrite('tmp/match_dpc.png', model.dp_vis[model.dps[0].long()].cpu().numpy()*255)
def main(_):
opts.img_size=opts.render_size
trainer = v2s_trainer(opts, is_eval=True)
data_info = trainer.init_dataset()
trainer.define_model(data_info)
#write matching function
img_match = match_frames(trainer, opts.match_frames)
if __name__ == '__main__':
app.run(main)
|
banmo-main
|
scripts/visualize/match.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
"""
python scripts/ama-process/ama2davis.py --path ./database/T_swing/
"""
import pdb
import cv2
import numpy as np
import os
import glob
import argparse
import sys
from shutil import copyfile
sys.path.insert(0,'')
from utils.io import mkdir_p
parser = argparse.ArgumentParser(description='script to render cameras over epochs')
parser.add_argument('--path', default='',
help='path to ama seq dir')
args = parser.parse_args()
path = '%s/images/*'%args.path
seqname = args.path.strip('/').split('/')[-1]
outdir = './database/DAVIS/'
vid_idx = 0
for rgb_path in sorted(glob.glob(path)):
vid_idx_tmp = int(rgb_path.split('/')[-1].split('_')[0][5:])
if vid_idx_tmp != vid_idx:
idx=0
vid_idx = vid_idx_tmp
outsil_dir = '%s/Annotations/Full-Resolution/%s%d'%(outdir, seqname,vid_idx)
outrgb_dir = '%s/JPEGImages/Full-Resolution/%s%d'%(outdir, seqname,vid_idx)
#TODO delete if exists
mkdir_p(outrgb_dir)
mkdir_p(outsil_dir)
sil_path = rgb_path.replace('images', 'silhouettes').replace('Image','Silhouette')
outsil_path = '%s/%05d.png'%(outsil_dir, idx)
sil = cv2.imread(sil_path,0)
sil = (sil>0).astype(np.uint8)
# remove extra sils
nb_components, output, stats, centroids = \
cv2.connectedComponentsWithStats(sil, connectivity=8)
if nb_components>1:
max_label, max_size = max([(i, stats[i, cv2.CC_STAT_AREA]) for i in range(1, nb_components)], key=lambda x: x[1])
sil = output == max_label
sil = (sil>0).astype(np.uint8)*128
cv2.imwrite(outsil_path, sil)
outrgb_path = '%s/%05d.jpg'%(outrgb_dir, idx)
img = cv2.imread(rgb_path)
cv2.imwrite(outrgb_path, img)
print(outrgb_path)
print(outsil_path)
idx = idx+1
|
banmo-main
|
scripts/ama-process/ama2davis.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import numpy as np
import cv2
import pdb
pmat = np.loadtxt('/private/home/gengshany/data/AMA/T_swing/calibration/Camera1.Pmat.cal')
K,R,T,_,_,_,_=cv2.decomposeProjectionMatrix(pmat)
print(K/K[-1,-1])
print(R)
print(T/T[-1])
pdb.set_trace()
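# Sanity-check sketch (illustrative): cv2.decomposeProjectionMatrix returns K,
# R and the homogeneous camera center C (named T above), so pmat can be
# rebuilt up to scale as P = K [R | -R C]:
#   C = (T[:3] / T[3]).ravel()
#   P_rec = K.dot(np.hstack([R, -R.dot(C[:, None])]))  # proportional to pmat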
|
banmo-main
|
scripts/ama-process/read_cam.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import sys
sys.path.insert(0,'third_party')
sys.path.insert(0,'./')
import numpy as np
import trimesh
import torch
import cv2
import pdb
from scipy.spatial.transform import Rotation as R
from nnutils.geom_utils import obj_to_cam, pinhole_cam, render_color, render_flow
from ext_utils.flowlib import flow_to_image
from ext_utils.util_flow import write_pfm
from utils.io import mkdir_p
import soft_renderer as sr
import argparse
parser = argparse.ArgumentParser(description='render data')
parser.add_argument('--outdir', default='eagle',
help='output dir')
parser.add_argument('--model', default='eagle',
help='model to render, {eagle, hands}')
parser.add_argument('--rot_axis', default='y',
help='axis to rotate around')
parser.add_argument('--nframes', default=3,type=int,
help='number of frames to render')
parser.add_argument('--alpha', default=1.,type=float,
help='0-1, percentage of a full cycle')
parser.add_argument('--init_a', default=0.25,type=float,
help='0-1, percentage of a full cycle for initial pose')
parser.add_argument('--xspeed', default=0,type=float,
                    help='frame sampling speed-up factor')
parser.add_argument('--focal', default=2,type=float,
help='focal length')
parser.add_argument('--d_obj', default=3,type=float,
help='object depth')
parser.add_argument('--can_rand', dest='can_rand',action='store_true',
                    help='randomize canonical space')
parser.add_argument('--img_size', default=512,type=int,
help='image size')
parser.add_argument('--render_flow', dest='render_flow',action='store_true',
help='render flow')
args = parser.parse_args()
## io
img_size = args.img_size
bgcolor = None
#bgcolor = np.asarray([0,0,0])
d_obj = args.d_obj
filedir='database'
rot_rand = torch.Tensor(R.random().as_matrix()).cuda()
overts_list = []
for i in range(args.nframes):
if args.model=='eagle':
mesh = sr.Mesh.from_obj('database/eagle/Eagle-original_%06d.obj'%int(i*args.xspeed), load_texture=True, texture_res=5, texture_type='surface')
elif args.model=='hands':
mesh = sr.Mesh.from_obj('database/hands/hands_%06d.obj'%int(1+i*args.xspeed), load_texture=True, texture_res=100, texture_type='surface')
overts = mesh.vertices
if i==0:
center = overts.mean(1)[:,None]
scale = max((overts - center)[0].abs().max(0)[0])
overts -= center
overts *= 1.0 / float(scale)
    overts[:,:,1]*= -1 # align with camera coordinates
# random rot
if args.can_rand:
overts[0] = overts[0].matmul(rot_rand.T)
overts_list.append(overts)
colors=mesh.textures
faces = mesh.faces
mkdir_p( '%s/DAVIS/JPEGImages/Full-Resolution/%s/' %(filedir,args.outdir))
mkdir_p( '%s/DAVIS/Annotations/Full-Resolution/%s/' %(filedir,args.outdir))
mkdir_p( '%s/DAVIS/Cameras/Full-Resolution/%s/' %(filedir,args.outdir))
mkdir_p( '%s/DAVIS/Meshes/Full-Resolution/%s/' %(filedir,args.outdir))
# soft renderer
renderer = sr.SoftRenderer(image_size=img_size, sigma_val=1e-12,
camera_mode='look_at',perspective=False, aggr_func_rgb='hard',
light_mode='vertex', light_intensity_ambient=1.,light_intensity_directionals=0.)
#light_intensity_ambient=0.,light_intensity_directionals=1., light_directions=[-1.,-0.5,1.])
verts_ndc_list = []
for i in range(0,args.nframes):
verts = overts_list[i]
# set cameras
#rotx = np.random.rand()
if args.rot_axis=='x':
rotx = args.init_a*6.28+args.alpha*6.28*i/args.nframes
else:
rotx=0.
# if i==0: rotx=0.
if args.rot_axis=='y':
roty = args.init_a*6.28+args.alpha*6.28*i/args.nframes
else:
roty = 0
rotz = 0.
Rmat = cv2.Rodrigues(np.asarray([rotx, roty, rotz]))[0]
Rmat = torch.Tensor(Rmat).cuda()
# random rot
if args.can_rand:
Rmat = Rmat.matmul(rot_rand.T)
Tmat = torch.Tensor([0,0,d_obj] ).cuda()
K = torch.Tensor([args.focal,args.focal,0,0] ).cuda()
Kimg = torch.Tensor([args.focal*img_size/2.,args.focal*img_size/2.,img_size/2.,img_size/2.] ).cuda()
# add RTK: [R_3x3|T_3x1]
# [fx,fy,px,py], to the ndc space
rtk = np.zeros((4,4))
rtk[:3,:3] = Rmat.cpu().numpy()
rtk[:3, 3] = Tmat.cpu().numpy()
rtk[3, :] = Kimg .cpu().numpy()
# obj-cam transform
verts = obj_to_cam(verts, Rmat, Tmat)
mesh_cam = trimesh.Trimesh(vertices=verts[0].cpu().numpy(),
faces=faces[0].cpu().numpy())
trimesh.repair.fix_inversion(mesh_cam)
    # perspective projection
verts = pinhole_cam(verts, K)
verts_ndc_list.append(verts.clone())
# render sil+rgb
rendered = render_color(renderer, verts, faces, colors, texture_type='surface')
rendered_img = rendered[0,:3].permute(1,2,0).cpu().numpy()*255
rendered_sil = rendered[0,-1].cpu().numpy()*128
if bgcolor is None:
bgcolor = 255-rendered_img[rendered_sil.astype(bool)].mean(0)
rendered_img[~rendered_sil.astype(bool)]=bgcolor[None]
cv2.imwrite('%s/DAVIS/JPEGImages/Full-Resolution/%s/%05d.jpg' %(filedir,args.outdir,i),rendered_img[:,:,::-1])
cv2.imwrite('%s/DAVIS/Annotations/Full-Resolution/%s/%05d.png' %(filedir,args.outdir,i),rendered_sil)
np.savetxt('%s/DAVIS/Cameras/Full-Resolution/%s/%05d.txt' %(filedir,args.outdir,i),rtk)
mesh_cam.export('%s/DAVIS/Meshes/Full-Resolution/%s/%05d.obj' %(filedir,args.outdir,i))
print(i)
if args.render_flow:
for dframe in [1,2,4,8,16,32]:
print('dframe: %d'%(dframe))
flobw_outdir = '%s/DAVIS/FlowBW_%d/Full-Resolution/%s/'%(filedir,dframe,args.outdir)
flofw_outdir = '%s/DAVIS/FlowFW_%d/Full-Resolution/%s/'%(filedir,dframe,args.outdir)
mkdir_p(flofw_outdir)
mkdir_p(flobw_outdir)
# render flow
occ = -np.ones((img_size, img_size)).astype(np.float32)
for i in range(dframe,args.nframes):
verts_ndc = verts_ndc_list[i-dframe]
verts_ndc_n = verts_ndc_list[i]
flow_fw = render_flow(renderer, verts_ndc, faces, verts_ndc_n)
flow_bw = render_flow(renderer, verts_ndc_n, faces, verts_ndc)
# to pixels
flow_fw = flow_fw*(img_size-1)/2
flow_bw = flow_bw*(img_size-1)/2
flow_fw = flow_fw.cpu().numpy()[0]
flow_bw = flow_bw.cpu().numpy()[0]
write_pfm( '%s/flo-%05d.pfm'%(flofw_outdir,i-dframe),flow_fw)
write_pfm( '%s/flo-%05d.pfm'%(flobw_outdir,i), flow_bw)
write_pfm( '%s/occ-%05d.pfm'%(flofw_outdir,i-dframe),occ)
write_pfm( '%s/occ-%05d.pfm'%(flobw_outdir,i), occ)
cv2.imwrite('%s/col-%05d.jpg'%(flofw_outdir,i-dframe),flow_to_image(flow_fw)[:,:,::-1])
cv2.imwrite('%s/col-%05d.jpg'%(flobw_outdir,i), flow_to_image(flow_bw)[:,:,::-1])
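# Note (inferred from the conversion above): render_flow yields flow in NDC
# units ([-1, 1] across the image), so multiplying by (img_size-1)/2 converts
# it to pixel displacements before the PFMs are written.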
|
banmo-main
|
scripts/synthetic/render_synthetic.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
# python scripts/eval_root.py cam-files/adult7-b25/ cam-files/adult-masked-cam/ 1000
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(sys.path[0])))
os.environ["PYOPENGL_PLATFORM"] = "egl" #opengl seems to only work with TPU
curr_dir = os.path.abspath(os.getcwd())
sys.path.insert(0,curr_dir)
import pdb
import glob
import numpy as np
import torch
import cv2
import soft_renderer as sr
import argparse
import trimesh
import configparser
from utils.io import config_to_dataloader, draw_cams, load_root
from nnutils.geom_utils import rot_angle, align_sim3
root_a_dir=sys.argv[1]
root_b_dir=sys.argv[2]
cap_frame=int(sys.argv[3])
def umeyama_alignment(x, y, with_scale=False):
"""
https://github.com/Huangying-Zhan/kitti-odom-eval/blob/master/kitti_odometry.py
Computes the least squares solution parameters of an Sim(m) matrix
that minimizes the distance between a set of registered points.
Umeyama, Shinji: Least-squares estimation of transformation parameters
between two point patterns. IEEE PAMI, 1991
:param x: mxn matrix of points, m = dimension, n = nr. of data points
:param y: mxn matrix of points, m = dimension, n = nr. of data points
:param with_scale: set to True to align also the scale (default: 1.0 scale)
:return: r, t, c - rotation matrix, translation vector and scale factor
"""
if x.shape != y.shape:
assert False, "x.shape not equal to y.shape"
# m = dimension, n = nr. of data points
m, n = x.shape
# means, eq. 34 and 35
mean_x = x.mean(axis=1)
mean_y = y.mean(axis=1)
# variance, eq. 36
# "transpose" for column subtraction
sigma_x = 1.0 / n * (np.linalg.norm(x - mean_x[:, np.newaxis])**2)
# covariance matrix, eq. 38
outer_sum = np.zeros((m, m))
for i in range(n):
outer_sum += np.outer((y[:, i] - mean_y), (x[:, i] - mean_x))
cov_xy = np.multiply(1.0 / n, outer_sum)
# SVD (text betw. eq. 38 and 39)
u, d, v = np.linalg.svd(cov_xy)
# S matrix, eq. 43
s = np.eye(m)
if np.linalg.det(u) * np.linalg.det(v) < 0.0:
# Ensure a RHS coordinate system (Kabsch algorithm).
s[m - 1, m - 1] = -1
# rotation, eq. 40
r = u.dot(s).dot(v)
# scale & translation, eq. 42 and 41
c = 1 / sigma_x * np.trace(np.diag(d).dot(s)) if with_scale else 1.0
t = mean_y - np.multiply(c, r.dot(mean_x))
return r, t, c
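# Self-check sketch (illustrative, not part of the original script): recover a
# known similarity transform from synthetic 3D points.
#   x = np.random.rand(3, 100)
#   r_gt = cv2.Rodrigues(np.asarray([0.1, 0.2, 0.3]))[0]
#   y = 2.0 * r_gt.dot(x) + np.asarray([[1.], [2.], [3.]])
#   r, t, c = umeyama_alignment(x, y, with_scale=True)
#   # expect r ~ r_gt, c ~ 2.0, t ~ [1, 2, 3]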
def main():
rootlist_a = load_root(root_a_dir, cap_frame)
rootlist_b = load_root(root_b_dir, cap_frame)
# align
rootlist_b = align_sim3(rootlist_a, rootlist_b)
# construct camera mesh
mesh_a = draw_cams(rootlist_a, color='gray')
mesh_b = draw_cams(rootlist_b)
mesh = trimesh.util.concatenate([mesh_a, mesh_b])
mesh.export('0.obj')
# see the usage example at the top of this file; aligns two sets of root-body
# cameras and exports both trajectories to 0.obj
if __name__ == '__main__':
main()
|
banmo-main
|
scripts/eval/eval_root.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pdb
import os.path as osp
import sys
sys.path.insert(0,'third_party')
import numpy as np
from absl import flags, app
import torch
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from torch.utils.data.dataloader import default_collate
import torch.nn.functional as F
import cv2
import time
from scipy.ndimage import binary_erosion
from ext_utils.util_flow import readPFM
from ext_utils.flowlib import warp_flow
from nnutils.geom_utils import resample_dp
def read_json(filepath, mask):
import json
with open(filepath) as f:
maxscore=-1
for pid in json.load(f)['people']:
ppose = np.asarray(pid['pose_keypoints_2d']).reshape((-1,3))
pocc = cv2.remap(mask.astype(int), ppose[:,0].astype(np.float32),ppose[:,1].astype(np.float32),interpolation=cv2.INTER_NEAREST)
pscore = pocc.sum()
if pscore>maxscore: maxscore = pscore; maxpose = ppose
return maxpose
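# Minimal usage sketch (illustrative; assumes an OpenPose-style json with a
# 'people' list, and the file names here are hypothetical):
#   mask = cv2.imread('mask.png', 0) > 0
#   pose = read_json('keypoints.json', mask)  # (num_kp, 3): x, y, confidence
# The pose whose keypoints overlap the mask most is returned.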
# -------------- Dataset ------------- #
# ------------------------------------ #
class BaseDataset(Dataset):
'''
img, mask, kp, pose data loader
'''
def __init__(self, opts, filter_key=None):
# Child class should define/load:
# self.kp_perm
# self.img_dir
# self.anno
# self.anno_sfm
self.opts = opts
self.img_size = opts['img_size']
self.filter_key = filter_key
self.flip=0
self.crop_factor = 1.2
self.load_pair = True
self.spec_dt = 0 # whether to specify the dframe, only in preload
def mirror_image(self, img, mask):
if np.random.rand(1) > 0.5:
            # Need a copy because torch collate doesn't like negative strides
img_flip = img[:, ::-1, :].copy()
mask_flip = mask[:, ::-1].copy()
return img_flip, mask_flip
else:
return img, mask
def __len__(self):
return self.num_imgs
def read_raw(self, im0idx, flowfw,dframe):
#ss = time.time()
img_path = self.imglist[im0idx]
img = cv2.imread(img_path)[:,:,::-1] / 255.0
shape = img.shape
if len(shape) == 2:
img = np.repeat(np.expand_dims(img, 2), 3, axis=2)
mask = cv2.imread(self.masklist[im0idx],0)
#print('mask+img:%f'%(time.time()-ss))
mask = mask/np.sort(np.unique(mask))[1]
occluder = mask==255
mask[occluder] = 0
if mask.shape[0]!=img.shape[0] or mask.shape[1]!=img.shape[1]:
mask = cv2.resize(mask, img.shape[:2][::-1],interpolation=cv2.INTER_NEAREST)
mask = binary_erosion(mask,iterations=2)
mask = np.expand_dims(mask, 2)
#print('mask sort:%f'%(time.time()-ss))
# flow
if flowfw:
flowpath = self.flowfwlist[im0idx]
else:
flowpath = self.flowbwlist[im0idx]
flowpath = flowpath.replace('FlowBW', 'FlowBW_%d'%(dframe)).\
replace('FlowFW', 'FlowFW_%d'%(dframe))
try:
flow = readPFM(flowpath)[0]
occ = readPFM(flowpath.replace('flo-', 'occ-'))[0]
h,w,_ = mask.shape
oh,ow=flow.shape[:2]
factor_h = h/oh
factor_w = w/ow
flow = cv2.resize(flow, (w,h))
occ = cv2.resize(occ, (w,h))
flow[...,0] *= factor_w
flow[...,1] *= factor_h
except:
print('warning: loading empty flow from %s'%(flowpath))
flow = np.zeros_like(img)
occ = np.zeros_like(mask)
flow = flow[...,:2]
occ[occluder] = 0
#print('flo:%f'%(time.time()-ss))
try:
dp = readPFM(self.dplist[im0idx])[0]
except:
print('error loading densepose surface')
dp = np.zeros_like(occ)
try:
dp_feat = readPFM(self.featlist[im0idx])[0]
dp_bbox = np.loadtxt(self.bboxlist[im0idx])
except:
print('error loading densepose feature')
dp_feat = np.zeros((16*112,112))
dp_bbox = np.zeros((4))
dp= (dp *50).astype(np.int32)
dp_feat = dp_feat.reshape((16,112,112)).copy()
#print('dp:%f'%(time.time()-ss))
# add RTK: [R_3x3|T_3x1]
# [fx,fy,px,py], to the ndc space
try:
rtk_path = self.rtklist[im0idx]
rtk = np.loadtxt(rtk_path)
except:
#print('warning: loading empty camera')
#print(rtk_path)
rtk = np.zeros((4,4))
rtk[:3,:3] = np.eye(3)
rtk[:3, 3] = np.asarray([0,0,10])
rtk[3, :] = np.asarray([512,512,256,256])
# create mask for visible vs unknown
vis2d = np.ones_like(mask)
#print('rtk:%f'%(time.time()-ss))
# crop the image according to mask
kaug, hp0, A, B= self.compute_crop_params(mask)
#print('crop params:%f'%(time.time()-ss))
x0 = hp0[:,:,0].astype(np.float32)
y0 = hp0[:,:,1].astype(np.float32)
img = cv2.remap(img,x0,y0,interpolation=cv2.INTER_LINEAR)
mask = cv2.remap(mask.astype(int),x0,y0,interpolation=cv2.INTER_NEAREST)
flow = cv2.remap(flow,x0,y0,interpolation=cv2.INTER_LINEAR)
occ = cv2.remap(occ,x0,y0,interpolation=cv2.INTER_LINEAR)
dp =cv2.remap(dp, x0,y0,interpolation=cv2.INTER_NEAREST)
vis2d=cv2.remap(vis2d.astype(int),x0,y0,interpolation=cv2.INTER_NEAREST)
#print('crop:%f'%(time.time()-ss))
# Finally transpose the image to 3xHxW
img = np.transpose(img, (2, 0, 1))
mask = (mask>0).astype(float)
#TODO transform dp feat to same size as img
dp_feat_rsmp = resample_dp(F.normalize(torch.Tensor(dp_feat)[None],2,1),
torch.Tensor(dp_bbox)[None],
torch.Tensor(kaug )[None],
self.img_size)
rt_dict = {}
rt_dict['img'] = img
rt_dict['mask'] = mask
rt_dict['flow'] = flow
rt_dict['occ'] = occ
rt_dict['dp'] = dp
rt_dict['vis2d'] = vis2d
rt_dict['dp_feat'] = dp_feat
rt_dict['dp_feat_rsmp'] = dp_feat_rsmp
rt_dict['dp_bbox'] = dp_bbox
rt_dict['rtk'] = rtk
return rt_dict, kaug, hp0, A,B
def compute_crop_params(self, mask):
#ss=time.time()
indices = np.where(mask>0); xid = indices[1]; yid = indices[0]
center = ( (xid.max()+xid.min())//2, (yid.max()+yid.min())//2)
length = ( (xid.max()-xid.min())//2, (yid.max()-yid.min())//2)
length = (int(self.crop_factor*length[0]), int(self.crop_factor*length[1]))
#print('center:%f'%(time.time()-ss))
maxw=self.img_size;maxh=self.img_size
orisize = (2*length[0], 2*length[1])
alp = [orisize[0]/maxw ,orisize[1]/maxw]
# intrinsics induced by augmentation: map augmented to original img
# correct cx,cy at clip space (not tx, ty)
if self.flip==0:
pps = np.asarray([float( center[0] - length[0] ), float( center[1] - length[1] )])
else:
pps = np.asarray([-float( center[0] - length[0] ), float( center[1] - length[1] )])
kaug = np.asarray([alp[0], alp[1], pps[0], pps[1]])
x0,y0 =np.meshgrid(range(maxw),range(maxh))
A = np.eye(3)
B = np.asarray([[alp[0],0,(center[0]-length[0])],
[0,alp[1],(center[1]-length[1])],
[0,0,1]]).T
hp0 = np.stack([x0,y0,np.ones_like(x0)],-1) # screen coord
hp0 = np.dot(hp0,A.dot(B)) # image coord
return kaug, hp0, A,B
def flow_process(self,flow, flown, occ, occn, hp0, hp1, A,B,Ap,Bp):
maxw=self.img_size;maxh=self.img_size
# transform the flow into the augmented (cropped) coordinates
hp1c = np.concatenate([flow[:,:,:2] + hp0[:,:,:2], np.ones_like(hp0[:,:,:1])],-1) # image coord
hp1c = hp1c.dot(np.linalg.inv(Ap.dot(Bp))) # screen coord
flow[:,:,:2] = hp1c[:,:,:2] - np.stack(np.meshgrid(range(maxw),range(maxh)),-1)
hp0c = np.concatenate([flown[:,:,:2] +hp1[:,:,:2], np.ones_like(hp0[:,:,:1])],-1) # image coord
hp0c = hp0c.dot(np.linalg.inv(A.dot(B))) # screen coord
flown[:,:,:2] =hp0c[:,:,:2] - np.stack(np.meshgrid(range(maxw),range(maxh)),-1)
#fb check
x0,y0 =np.meshgrid(range(maxw),range(maxh))
hp0 = np.stack([x0,y0],-1) # screen coord
#hp0 = np.stack([x0,y0,np.ones_like(x0)],-1) # screen coord
dis = warp_flow(hp0 + flown, flow[:,:,:2]) - hp0
dis = np.linalg.norm(dis[:,:,:2],2,-1)
occ = dis / self.img_size * 2
#occ = np.exp(-5*occ) # 1/5 img size
occ = np.exp(-25*occ)
occ[occ<0.25] = 0. # this corresponds to 1/40 of the img size
#dis = np.linalg.norm(dis[:,:,:2],2,-1) * 0.1
#occ[occ!=0] = dis[occ!=0]
disn = warp_flow(hp0 + flow, flown[:,:,:2]) - hp0
disn = np.linalg.norm(disn[:,:,:2],2,-1)
occn = disn / self.img_size * 2
occn = np.exp(-25*occn)
occn[occn<0.25] = 0.
#disn = np.linalg.norm(disn[:,:,:2],2,-1) * 0.1
#occn[occn!=0] = disn[occn!=0]
# ndc
flow[:,:,0] = 2 * (flow[:,:,0]/maxw)
flow[:,:,1] = 2 * (flow[:,:,1]/maxh)
#flow[:,:,2] = np.logical_and(flow[:,:,2]!=0, occ<10) # as the valid pixels
flown[:,:,0] = 2 * (flown[:,:,0]/maxw)
flown[:,:,1] = 2 * (flown[:,:,1]/maxh)
#flown[:,:,2] = np.logical_and(flown[:,:,2]!=0, occn<10) # as the valid pixels
flow = np.transpose(flow, (2, 0, 1))
flown = np.transpose(flown, (2, 0, 1))
return flow, flown, occ, occn
def load_data(self, index):
#pdb.set_trace()
#ss = time.time()
try:dataid = self.dataid
except: dataid=0
im0idx = self.baselist[index]
dir_fac = self.directlist[index]*2-1
dframe_list = [2,4,8,16,32]
max_id = max(self.baselist)
dframe_list = [1] + [i for i in dframe_list if (im0idx%i==0) and \
int(im0idx+i*dir_fac) <= max_id]
dframe = np.random.choice(dframe_list)
if self.spec_dt>0:dframe=self.dframe
if self.directlist[index]==1:
# forward flow
im1idx = im0idx + dframe
flowfw = True
else:
im1idx = im0idx - dframe
flowfw = False
rt_dict, kaug, hp0, A,B = self.read_raw(im0idx, flowfw=flowfw,
dframe=dframe)
img = rt_dict['img']
mask = rt_dict['mask']
flow = rt_dict['flow']
occ = rt_dict['occ']
dp = rt_dict['dp']
vis2d = rt_dict['vis2d']
dp_feat = rt_dict['dp_feat']
dp_bbox = rt_dict['dp_bbox']
rtk = rt_dict['rtk']
dp_feat_rsmp = rt_dict['dp_feat_rsmp']
frameid = im0idx
is_canonical = self.can_frame == im0idx
#print('before 2nd read-raw:%f'%(time.time()-ss))
if self.load_pair:
rt_dictn,kaugn,hp1,Ap,Bp = self.read_raw(im1idx, flowfw=(not flowfw),
dframe=dframe)
imgn = rt_dictn['img']
maskn = rt_dictn['mask']
flown = rt_dictn['flow']
occn = rt_dictn['occ']
dpn = rt_dictn['dp']
vis2dn= rt_dictn['vis2d']
dp_featn = rt_dictn['dp_feat']
dp_bboxn = rt_dictn['dp_bbox']
rtkn = rt_dictn['rtk']
dp_featn_rsmp = rt_dictn['dp_feat_rsmp']
is_canonicaln = self.can_frame == im1idx
#print('before process:%f'%(time.time()-ss))
flow, flown, occ, occn = self.flow_process(flow, flown, occ, occn,
hp0, hp1, A,B,Ap,Bp)
#print('after process:%f'%(time.time()-ss))
# stack data
img = np.stack([img, imgn])
mask= np.stack([mask,maskn])
flow= np.stack([flow, flown])
occ = np.stack([occ, occn])
dp = np.stack([dp, dpn])
vis2d= np.stack([vis2d, vis2dn])
dp_feat= np.stack([dp_feat, dp_featn])
dp_feat_rsmp= np.stack([dp_feat_rsmp, dp_featn_rsmp])
dp_bbox = np.stack([dp_bbox, dp_bboxn])
rtk= np.stack([rtk, rtkn])
kaug= np.stack([kaug,kaugn])
dataid= np.stack([dataid, dataid])
frameid= np.stack([im0idx, im1idx])
is_canonical= np.stack([is_canonical, is_canonicaln])
elem = {}
elem['img'] = img # s
elem['mask'] = mask # s
elem['flow'] = flow # s
elem['occ'] = occ # s
elem['dp'] = dp # x
elem['dp_feat'] = dp_feat # y
elem['dp_feat_rsmp'] = dp_feat_rsmp # y
elem['dp_bbox'] = dp_bbox
elem['vis2d'] = vis2d # y
elem['rtk'] = rtk
elem['kaug'] = kaug
elem['dataid'] = dataid
elem['frameid'] = frameid
elem['is_canonical'] = is_canonical
return elem
def preload_data(self, index):
#TODO combine to a single function with load_data
try:dataid = self.dataid
except: dataid=0
im0idx = self.baselist[index]
dir_fac = self.directlist[index]*2-1
dframe_list = [2,4,8,16,32]
max_id = max(self.baselist)
dframe_list = [1] + [i for i in dframe_list if (im0idx%i==0) and \
int(im0idx+i*dir_fac) <= max_id]
dframe = np.random.choice(dframe_list)
if self.spec_dt>0:dframe=self.dframe
save_dir = self.imglist[0].replace('JPEGImages', 'Preload').rsplit('/',1)[0]
data_path = '%s/%d_%05d.npy'%(save_dir, dframe, im0idx)
elem = np.load(data_path,allow_pickle=True).item()
# modify dataid according to training time ones
elem['dataid'] = np.stack([dataid, dataid])[None]
# reload rtk based on rtk predictions
# add RTK: [R_3x3|T_3x1]
# [fx,fy,px,py], to the ndc space
# always forward flow
im1idx = im0idx + dframe
try:
rtk_path = self.rtklist[im0idx]
rtk = np.loadtxt(rtk_path)
rtkn_path = self.rtklist[im1idx]
rtkn = np.loadtxt(rtkn_path)
rtk = np.stack([rtk, rtkn])
except:
#print('warning: loading empty camera')
#print(rtk_path)
rtk = np.zeros((4,4))
rtk[:3,:3] = np.eye(3)
rtk[:3, 3] = np.asarray([0,0,10])
rtk[3, :] = np.asarray([512,512,256,256])
rtkn = rtk.copy()
rtk = np.stack([rtk, rtkn])
elem['rtk']= rtk[None]
for k in elem.keys():
elem[k] = elem[k][0]
if not self.load_pair:
elem[k] = elem[k][:1]
# deal with img_size (only for eval visualization purpose)
current_size = elem['img'].shape[-1]
# make target_size even so that
# target_size + 2*pad == self.img_size
target_size = int(self.img_size / self.crop_factor * 1.2 /2) * 2
pad = (self.img_size - target_size)//2
for k in ['img', 'mask', 'flow', 'occ', 'dp', 'vis2d']:
tensor = torch.Tensor(elem[k]).view(1,-1,current_size, current_size)
tensor = F.interpolate(tensor, (target_size, target_size),
mode='nearest')
tensor = F.pad(tensor, (pad, pad, pad, pad))
elem[k] = tensor.numpy()
# deal with intrinsics change due to crop factor
length = elem['kaug'][:,:2] * 512 / 2 / 1.2
elem['kaug'][:,2:] += length*(1.2-self.crop_factor)
elem['kaug'][:,:2] *= current_size/float(target_size)
return elem
def __getitem__(self, index):
if self.preload:
# find the corresponding fw index in the dataset
if self.directlist[index] != 1:
refidx = self.baselist[index]-1
same_idx = np.where(np.asarray(self.baselist)==refidx)[0]
index = sorted(same_idx)[0]
try:
# loading may fail for the last index of the dataset
elem = self.preload_data(index)
except:
print('loading %d failed'%index)
elem = self.preload_data(0)
else:
elem = self.load_data(index)
return elem
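# Minimal usage sketch (editor's addition; the toy subclass and sizes are
# assumptions): exercise compute_crop_params on a synthetic mask.
if __name__ == '__main__':
    class _ToyDataset(BaseDataset):
        def __init__(self):
            BaseDataset.__init__(self, {'img_size': 8})
    mask = np.zeros((32, 32)); mask[8:24, 8:24] = 1
    kaug, hp0, A, B = _ToyDataset().compute_crop_params(mask)
    # kaug = [sx, sy, px, py]: crop scale and principal-point shift;
    # hp0 maps every pixel of the 8x8 crop back to original-image coordinates
    print(kaug, hp0.shape)  # -> [2. 2. 7. 7.] (8, 8, 3)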
|
banmo-main
|
dataloader/vidbase.py
|
banmo-main
|
dataloader/__init__.py
|
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path as osp
import numpy as np
import scipy.io as sio
from absl import flags, app
import random
import torch
from torch.utils.data import Dataset
import pdb
import glob
from torch.utils.data import DataLoader
import configparser
from utils.io import config_to_dataloader
opts = flags.FLAGS
def _init_fn(worker_id):
# note: fixed seed, so every data worker reproduces the same random stream
np.random.seed(1003)
random.seed(1003)
#----------- Data Loader ----------#
#----------------------------------#
def data_loader(opts_dict, shuffle=True):
num_workers = opts_dict['n_data_workers'] * opts_dict['batch_size']
num_workers = min(num_workers, 8)
#num_workers = 0
print('# workers: %d'%num_workers)
print('# pairs: %d'%opts_dict['batch_size'])
data_inuse = config_to_dataloader(opts_dict)
sampler = torch.utils.data.distributed.DistributedSampler(
data_inuse,
num_replicas=opts_dict['ngpu'],
rank=opts_dict['local_rank'],
shuffle=shuffle  # honor the caller's shuffle flag
)
data_inuse = DataLoader(data_inuse,
batch_size= opts_dict['batch_size'], num_workers=num_workers,
drop_last=True, worker_init_fn=_init_fn, pin_memory=True,
sampler=sampler)
return data_inuse
#----------- Eval Data Loader ----------#
#----------------------------------#
def eval_loader(opts_dict):
num_workers = 0
dataset = config_to_dataloader(opts_dict,is_eval=True)
dataset = DataLoader(dataset,
batch_size= 1, num_workers=num_workers, drop_last=False, pin_memory=True, shuffle=False)
return dataset
|
banmo-main
|
dataloader/frameloader.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
from setuptools import setup, find_packages
setup(
name='clutrr',
version='1.0.0',
description='Compositional Language Understanding with Text-based Relational Reasoning',
packages=find_packages(exclude=(
'data', 'mturk')),
include_package_data=True,
)
|
clutrr-main
|
setup.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# Clean the templates from mturk annotated data
# Input = mturk annotated file (amt_mturk.csv)
# Output = placeholder json
import pandas as pd
import argparse
from nltk.tokenize import word_tokenize
import difflib
import json
from sacremoses import MosesDetokenizer
detokenizer = MosesDetokenizer()
def extract_placeholder(df):
"""
Given the AMT annotated datasets, extract the placeholders.
Important to maintain the order of the entities after being matched
For example, to replace a proof state (2,3),(3,4), the order is
important.
For the paper, we provide the set of cleaned train and test splits for the placeholders
See `Clutrr.setup()` for download locations
:param df:
:return:
"""
#skipped = [109] # skipping the Jose - Richard row, shouldn't have approved it
skipped = []
for i, row in df.iterrows():
story = row['paraphrase']
ents_gender = {dd.split(':')[0]: dd.split(':')[1] for dd in row['genders'].split(',')}
words = word_tokenize(story)
ent_id_g = {}
if i in skipped:
continue
# skipping a problematic row where two names are very similar.
# TODO: remove this from the AMT study as well
if 'Micheal' in ents_gender and 'Michael' in ents_gender:
skipped.append(i)
continue
# build entity -> key list
# here order of entity is important, so first we fetch the ordering from
# the proof state
proof = eval(row['proof_state'])
m_built = []
if len(proof) > 0:
built = []
for prd in proof:
pr_lhs = list(prd.keys())[0]
pr_rhs = prd[pr_lhs]
if pr_lhs not in built:
built.extend(pr_rhs)
else:
pr_i = built.index(pr_lhs)
built[pr_i] = pr_rhs
for b in built:
if type(b) != list:
m_built.append(b)
else:
m_built.extend(b)
else:
# when there is no proof state, consider the order from query
query = eval(row['query'])
m_built.append((query[0], '', query[-1]))
# with the proof state, create an ordered ENT_id_gender dict
ent_gender_keys = {}
ordered_ents = []
# add entities in the dictionary
def add_ent(entity):
if entity not in ent_gender_keys:
ent_gender_keys[entity] = 'ENT_{}_{}'.format(len(ent_gender_keys), ents_gender[entity])
ordered_ents.append(entity)
for edge in m_built:
add_ent(edge[0])
add_ent(edge[-1])
if len(ordered_ents) != len(ents_gender):
print(i)
return
for ent_id, (ent, gender) in enumerate(ents_gender.items()):
matches = difflib.get_close_matches(ent, words, cutoff=0.9)
if len(matches) == 0:
print(row['paraphrase'])
print(ent)
return
match_idxs = [i for i, x in enumerate(words) if x in matches]
for wi in match_idxs:
words[wi] = ent_gender_keys[ent]
ent_id_g[ent_id] = gender
gender_key = '-'.join([ents_gender[ent] for ent in ordered_ents])
replaced = detokenizer.detokenize(words, return_str=True)
df.at[i, 'template'] = replaced
df.at[i, 'template_gender'] = gender_key
print('Skipped', skipped)
return df, skipped
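# Editor's sketch (assumption): the ENT_<id>_<gender> substitution above, run on a
# toy sentence with the same tokenize / close-match / detokenize steps.
def _demo_placeholder():
    words = word_tokenize("Jose is the father of Maria.")
    ent_gender_keys = {'Jose': 'ENT_0_male', 'Maria': 'ENT_1_female'}
    for ent, key in ent_gender_keys.items():
        matches = difflib.get_close_matches(ent, words, cutoff=0.9)
        words = [key if w in matches else w for w in words]
    # -> "ENT_0_male is the father of ENT_1_female."
    return detokenizer.detokenize(words, return_str=True)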
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--mfile', type=str, default='amt_mturk.csv', help='MTurk generated file')
parser.add_argument('--outfile', type=str, default='amt_placeholders', help='placeholders json file')
parser.add_argument('--split', type=float, default=0.8, help='Train/Test split.')
args = parser.parse_args()
df = pd.read_csv(args.mfile)
# do not use the rejected samples
df = df[df.review != 'rejected']
print("Number of accepted rows : {}".format(len(df)))
df, skipped = extract_placeholder(df)
# create a json file for easy lookup
placeholders = {}
for i, row in df.iterrows():
if i in skipped:
continue
if row['f_comb'] not in placeholders:
placeholders[row['f_comb']] = {}
if row['template_gender'] not in placeholders[row['f_comb']]:
placeholders[row['f_comb']][row['template_gender']] = []
placeholders[row['f_comb']][row['template_gender']].append(row['template'])
# training and testing split of the placeholders
train_p = {}
test_p = {}
for key, gv in placeholders.items():
if key not in train_p:
train_p[key] = {}
test_p[key] = {}
for gk, ps in gv.items():
split = int(len(placeholders[key][gk]) * args.split)
train_p[key][gk] = placeholders[key][gk][:split]
test_p[key][gk] = placeholders[key][gk][split:]
# save
json.dump(train_p, open(args.outfile + '.train.json','w'))
json.dump(test_p, open(args.outfile + '.test.json', 'w'))
json.dump(placeholders, open(args.outfile + '.json','w'))
print("Done.")
if __name__ == '__main__':
main()
|
clutrr-main
|
clutrr/template_mturk.py
|
clutrr-main
|
clutrr/__init__.py
|
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# Generate story-summary pairs
from clutrr.actors.ancestry import Ancestry
from clutrr.relations.builder import RelationBuilder
from tqdm import tqdm
import random
import numpy as np
import json
import copy
from clutrr.args import get_args
from clutrr.store.store import Store
from clutrr.utils.utils import comb_indexes
import pandas as pd
from clutrr.relations.templator import *
#store = Store()
def generate_rows(args, store, task_name, split=0.8, prev_patterns=None):
# pre-flight checks
combination_length = min(args.combination_length, args.relation_length)
if not args.use_mturk_template:
if combination_length > 1:
raise NotImplementedError("combination of two or more relations not implemented in Synthetic templating")
else:
if combination_length > 3:
raise NotImplementedError("combinations of > 3 not implemented in AMT Templating")
# generate
print(args.relation_length)
print("Loading templates...")
all_puzzles = {}
if args.template_split:
train_templates = json.load(open(args.template_file + '.train.json'))
test_templates = json.load(open(args.template_file + '.test.json'))
else:
train_templates = json.load(open(args.template_file + '.json'))
test_templates = json.load(open(args.template_file + '.json'))
if args.use_mturk_template:
templatorClass = TemplatorAMT
else:
synthetic_templates_per_rel = {}
for key, val in store.relations_store.items():
for gender, gv in val.items():
synthetic_templates_per_rel[gv['rel']] = gv['p']
templatorClass = TemplatorSynthetic
train_templates = synthetic_templates_per_rel
test_templates = synthetic_templates_per_rel
# Build a mapping from ANY relation to the SAME list of sentences for asking queries
query_templates = {}
for key, val in store.relations_store.items():
for gender, gv in val.items():
query_templates[gv['rel']] = store.question_store['relational']
query_templator_class = TemplatorSynthetic
pb = tqdm(total=args.num_rows)
num_stories = args.num_rows
stories_left = num_stories
columns = ['id', 'story', 'query', 'text_query', 'target', 'text_target', 'clean_story', 'proof_state', 'f_comb',
'task_name','story_edges','edge_types','query_edge','genders', 'syn_story', 'node_mapping', 'task_split']
f_comb_count = {}
rows = []
anc_num = 0
anc_num += 1
anc = Ancestry(args, store)
rb = RelationBuilder(args, store, anc)
while stories_left > 0:
status = rb.build()
if not status:
rb.reset_puzzle()
rb.anc.next_flip()
continue
rb.add_facts()
# keeping a count of generated patterns to make sure we have a homogeneous distribution
if len(f_comb_count) > 0 and args.equal:
min_c = min([v for k,v in f_comb_count.items()])
weight = {k:(min_c/v) for k,v in f_comb_count.items()}
rb.generate_puzzles(weight)
else:
rb.generate_puzzles()
# if unique_test_pattern flag is set, and split is 0 (which indicates the task is test),
# only take the same test patterns as before
# also assert that the relation - test is present
if args.unique_test_pattern and split == 0 and len(prev_patterns) > 0 and len(prev_patterns[args.relation_length]['test']) > 0:
# if all these conditions met, prune the puzzles
todel = []
for pid,puzzle in rb.puzzles.items():
if puzzle.relation_comb not in prev_patterns[args.relation_length]['test']:
todel.append(pid)
for pid in todel:
del rb.puzzles[pid]
# now we have got the puzzles, assign the templators
for pid, puzzle in rb.puzzles.items():
if puzzle.relation_comb not in f_comb_count:
f_comb_count[puzzle.relation_comb] = 0
f_comb_count[puzzle.relation_comb] += 1
pb.update(1)
stories_left -= 1
# store the puzzles
all_puzzles.update(rb.puzzles)
rb.reset_puzzle()
rb.anc.next_flip()
pb.close()
print("Puzzles created. Now splitting train and test on pattern level")
print("Number of unique puzzles : {}".format(len(all_puzzles)))
pattern_puzzles = {}
for pid, pz in all_puzzles.items():
if pz.relation_comb not in pattern_puzzles:
pattern_puzzles[pz.relation_comb] = []
pattern_puzzles[pz.relation_comb].append(pid)
print("Number of unique patterns : {}".format(len(pattern_puzzles)))
train_puzzles = []
test_puzzles = []
sp = int(len(pattern_puzzles) * split)
all_patterns = list(pattern_puzzles.keys())
no_pattern_overlap = not args.holdout
# if k=2, then set no_pattern_overlap=True
if args.relation_length == 2:
no_pattern_overlap = True
if not no_pattern_overlap:
# for case > 3, strict no pattern overlap
train_patterns = all_patterns[:sp]
pzs = [pattern_puzzles[p] for p in train_patterns]
pzs = [s for p in pzs for s in p]
train_puzzles.extend(pzs)
test_patterns = all_patterns[sp:]
pzs = [pattern_puzzles[p] for p in test_patterns]
pzs = [s for p in pzs for s in p]
test_puzzles.extend(pzs)
else:
# for case of 2, pattern overlap but templators are different
# In this case, we have overlapping patterns, first choose the overlapping patterns
# we directly split on puzzle level
train_patterns = all_patterns
test_patterns = all_patterns[sp:]
pzs_train = []
pzs_test = []
for pattern in all_patterns:
pz = pattern_puzzles[pattern]
if pattern in test_patterns:
# now split - hacky way
sz = int(len(pz) * (split - 0.2))
pzs_train.extend(pz[:sz])
pzs_test.extend(pz[sz:])
else:
pzs_train.extend(pz)
train_puzzles.extend(pzs_train)
test_puzzles.extend(pzs_test)
print("# Train puzzles : {}".format(len(train_puzzles)))
print("# Test puzzles : {}".format(len(test_puzzles)))
pb = tqdm(total=len(all_puzzles))
# saving in csv
for pid, puzzle in all_puzzles.items():
task_split = ''
if pid in train_puzzles:
task_split = 'train'
templator = templatorClass(templates=train_templates, family=puzzle.anc.family_data)
elif pid in test_puzzles:
task_split = 'test'
templator = templatorClass(templates=test_templates, family=puzzle.anc.family_data)
else:
raise AssertionError("pid must be either in train or test")
story_text = puzzle.generate_text(stype='story', combination_length=combination_length, templator=templator)
fact_text = puzzle.generate_text(stype='fact', combination_length=combination_length, templator=templator)
story = story_text + fact_text
story = random.sample(story, len(story))
story = ' '.join(story)
clean_story = ' '.join(story_text)
target_text = puzzle.generate_text(stype='target', combination_length=1, templator=templator)
story_key_edges = puzzle.get_story_relations(stype='story') + puzzle.get_story_relations(stype='fact')
# Build query text
query_templator = query_templator_class(templates=query_templates, family=puzzle.anc.family_data)
query_text = puzzle.generate_text(stype='query', combination_length=1, templator=query_templator)
query_text = ' '.join(query_text)
query_text = query_text.replace('?.', '?') # remove trailing '.'
puzzle.convert_node_ids(stype='story')
puzzle.convert_node_ids(stype='fact')
story_keys_changed_ids = puzzle.get_sorted_story_edges(stype='story') + puzzle.get_sorted_story_edges(stype='fact')
query_edge = puzzle.get_sorted_query_edge()
genders = puzzle.get_name_gender_string()
rows.append([pid, story, puzzle.query_text, query_text, puzzle.target_edge_rel, target_text,
clean_story, puzzle.proof_trace, puzzle.relation_comb, task_name, story_keys_changed_ids,
story_key_edges, query_edge, genders, '', puzzle.story_sort_dict, task_split])
pb.update(1)
pb.close()
print("{} ancestries created".format(anc_num))
print("Number of unique patterns : {}".format(len(f_comb_count)))
return columns, rows, all_puzzles, train_patterns, test_patterns
def test_run(args):
store = Store(args)
anc = Ancestry(args, store)
rb = RelationBuilder(args, store, anc)
rb.num_rel = 3
all_patterns = set()
while True:
for j in range(len(anc.family_data.keys())):
rb.build()
up = rb.unique_patterns()
all_patterns.update(up)
print(len(all_patterns))
rb.reset_puzzle()
if not rb.anc.next_flip():
break
print("Number of unique puzzles : {}".format(len(all_patterns)))
rb.add_facts()
rb.generate_puzzles()
print("Generated {} puzzles".format(len(rb.puzzles)))
pid = random.choice(list(rb.puzzles.keys()))
print(rb.puzzles[pid])
def main(args):
store = Store(args)
header, rows = generate_rows(args, store)
df = pd.DataFrame(columns=header, data=rows)
# split test train
msk = np.random.rand(len(df)) > args.test
train_df = df[msk]
test_df = df[~msk]
train_df.to_csv(args.output + '_train.csv')
test_df.to_csv(args.output + '_test.csv')
if __name__ == '__main__':
args = get_args()
test_run(args)
#main(args)
|
clutrr-main
|
clutrr/generator.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
## Note: With the current args (max_levels 3, min_child = max_child = 4), it is only
## possible to generate up to 8 relations on a single CPU. The code is not optimized yet.
import argparse
def get_args(command=None):
parser = argparse.ArgumentParser()
# graph parameters
parser.add_argument("--max_levels", default=3, type=int, help="max number of levels")
parser.add_argument("--min_child", default=4, type=int, help="max number of children per node")
parser.add_argument("--max_child", default=4, type=int, help="max number of children per node")
parser.add_argument("--p_marry", default=1.0, type=float, help="Probability of marriage among nodes")
# story parameters
parser.add_argument("--boundary",default=True, action='store_true', help='Boundary in entities')
parser.add_argument("--output", default="gen_m3", type=str, help='Prefix of the output file')
# Arguments not used now, use `--train_tasks` to set the task type and relation length
# parser.add_argument("--relation_length", default=3, type=int, help="Max relation path length")
# noise choices
# parser.add_argument("--noise_support", default=False, action='store_true',
# help="Noise type: Supporting facts")
# parser.add_argument("--noise_irrelevant", default=False, action='store_true',
# help="Noise type: Irrelevant facts")
# parser.add_argument("--noise_disconnected", default=False, action='store_true',
# help="Noise type: Disconnected facts")
# parser.add_argument("--noise_attributes", default=False, action='store_true',
# help="Noise type: Random attributes")
# store locations
parser.add_argument("--rules_store", default="rules_store.yaml", type=str, help='Rules store')
parser.add_argument("--relations_store", default="relations_store.yaml", type=str, help='Relations store')
parser.add_argument("--attribute_store", default="attribute_store.json", type=str, help='Attributes store')
parser.add_argument("--question_store", default="question_store.yaml", type=str, help='Question store')
# task
parser.add_argument("--train_tasks", default="1.3", type=str, help='Define which task to create dataset for, including the relationship length, comma separated')
parser.add_argument("--test_tasks", default="1.3", type=str, help='Define which tasks including the relation lengths to test for, comma separaated')
parser.add_argument("--train_rows", default=100, type=int, help='number of train rows')
parser.add_argument("--test_rows", default=100, type=int, help='number of test rows')
parser.add_argument("--memory", default=1, type=float, help='Percentage of tasks which are just memory retrieval')
parser.add_argument("--data_type", default="train", type=str, help='train/test')
# question type
parser.add_argument("--question", default=0, type=int, help='Question type. 0 -> relational, 1 -> yes/no')
# others
# parser.add_argument("--min_distractor_relations", default=8, type=int, help="Distractor relations about entities")
parser.add_argument("-v","--verbose", default=False, action='store_true',
help='print the paths')
parser.add_argument("-t","--test_split", default=0.2, help="Testing split")
parser.add_argument("--equal", default=False, action='store_true',
help="Make sure each pattern is equal. Warning: Time complexity of generation increases if this flag is set.")
parser.add_argument("--analyze", default=False, action='store_true', help="Analyze generated files")
parser.add_argument("--mturk", default=False, action='store_true', help='prepare data for mturk')
parser.add_argument("--holdout", default=False, action='store_true', help='if true, then hold out unique patterns in the test set')
parser.add_argument("--data_name", default='', type=str, help='Dataset name')
parser.add_argument("--use_mturk_template", default=False, action='store_true', help='use the templating data for mturk')
parser.add_argument("--template_length", type=int, default=2, help="Max Length of the template to substitute")
parser.add_argument("--template_file", type=str, default="amt_placeholders_clean.json", help="location of placeholders")
parser.add_argument("--template_split", default=True, action='store_true', help='Split on template level')
parser.add_argument("--combination_length", type=int, default=1, help="number of relations to combine together")
parser.add_argument("--output_dir", type=str, default="data", help="output_dir")
parser.add_argument("--store_full_puzzles", default=False, action='store_true',
help='store the full puzzle data in puzzles.pkl file. Warning: may take considerable amount of disk space!')
parser.add_argument("--unique_test_pattern", default=False, action='store_true', help="If true, have unique patterns generated in the first gen, and then choose from it.")
if command:
return parser.parse_args(command.split(' '))
else:
return parser.parse_args()
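# Minimal usage sketch (editor's addition): get_args accepts an optional command
# string instead of reading sys.argv, which is convenient for tests and notebooks.
if __name__ == '__main__':
    args = get_args("--train_tasks 1.2,1.3 --train_rows 50 --equal")
    print(args.train_tasks, args.train_rows, args.equal)  # -> 1.2,1.3 50 True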
|
clutrr-main
|
clutrr/args.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# main file which defines the tasks
from clutrr.args import get_args
from clutrr.generator import generate_rows
from clutrr.store.store import Store
import pandas as pd
import glob
import copy
import uuid
import os
import json
import shutil
import sys
import nltk
from nltk.tokenize import word_tokenize
import pickle as pkl
import requests
import hashlib
import zipfile
# check if nltk.punkt is installed
try:
nltk.data.find('tokenizers/punkt')
except LookupError:
nltk.download('punkt')
logPath = '../logs/'
fileName = 'data'
# sha of the placeholder files
SHA_SUM = 'ed2264836bb17fe094dc21fe6bb7492b000df520eb86f8e60b8441121f8ff924'
download_url = "https://cs.mcgill.ca/~ksinha4/data/"
import logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s',
handlers=[
# logging.FileHandler("{0}/{1}.log".format(logPath, fileName)),
logging.StreamHandler()
]
)
logger = logging.getLogger()
class Clutrr:
"""
Data Generation Script for the paper
"CLUTRR - A benchmark suite for inductive reasoning on text"
"""
def __init__(self, args):
self.args = self._init_vars(args)
# store the unique patterns for each relation here
self.unique_patterns = {}
self.setup()
def generate(self, choice, args, num_rows=0, data_type='train', multi=False, split=None):
"""
Choose the task and the relation length
Return the used args for storing
:param choice:
:param args:
:param num_rows:
:param data_type:
:param multi:
:return:
"""
args = copy.deepcopy(args)
args.num_rows = num_rows
args.data_type = data_type
if not multi:
task, relation_length = choice.split('.')
task_name = 'task_{}'.format(task)
logger.info("mode : {}, task : {}, rel_length : {}".format(data_type, task_name, relation_length))
task_method = getattr(self, task_name, lambda: "Task {} not implemented".format(choice))
args = task_method(args)
args.relation_length = int(relation_length)
store = Store(args)
columns, rows, all_puzzles, train_patterns, test_patterns = generate_rows(args,
store, task_name + '.{}'.format(relation_length), split=split, prev_patterns=self.unique_patterns)
self.unique_patterns[int(relation_length)] = {
'train': train_patterns,
'test': test_patterns
}
return (columns, rows, all_puzzles), args
else:
rows = []
columns = []
puzzles = {}
for ch in choice:
task, relation_length = ch.split('.')
task_name = 'task_{}'.format(task)
logger.info("task : {}, rel_length : {}".format(task_name, relation_length))
task_method = getattr(self, task_name, lambda: "Task {} not implemented".format(choice))
args = task_method(args)
args.relation_length = int(relation_length)
store = Store(args)
columns,r,pz = generate_rows(args, store, task_name + '.{}'.format(relation_length))
rows.extend(r)
puzzles.update(pz)
return ((columns, rows, puzzles), args)
def run_task(self):
"""
Default dispatcher method
"""
args = self.args
train_rows = args.train_rows
test_rows = args.test_rows
train_choices = args.train_tasks.split(',')
test_choices = args.test_tasks.split(',')
all_choices = []
for t in train_choices:
if t not in all_choices:
all_choices.append(t)
for t in test_choices:
if t not in all_choices:
all_choices.append(t)
train_datas = []
for choice in all_choices:
if choice in train_choices:
# split
choice_split = train_rows / (train_rows + test_rows)
num_rows = train_rows + test_rows
else:
# test, no split
choice_split = 0.0
num_rows = test_rows
print("Split : {}".format(choice_split))
train_datas.append(self.generate(choice, args, num_rows=num_rows, data_type='train', split=choice_split))
self.store(train_datas, None, args)
def assign_name(self, args, task_name):
"""
Create a name for the datasets:
- training file should end with _train
- testing file should end with _test
- each file name should have a unique hex
:param args:
:return:
"""
name = '{}_{}.csv'.format(task_name, args.data_type)
return name
def store(self, train_data, test_data, args):
"""
Take the dataset and do the following:
- Create a name for the files
- Create a folder and put the files in
- Write the config in a file and put it in the folder
- Compute the hash of the train and test files and store it in a file
:param train_data: list of rows
:param test_data: list of lists of rows
:return:
"""
train_tasks = args.train_tasks.split(',')
all_puzzles = {}
train_df = []
test_df = []
for i, td in enumerate(train_data):
train_rows_puzzles, train_args = td
assert len(train_rows_puzzles) == 3
train_rows, train_puzzles = train_rows_puzzles[:-1], train_rows_puzzles[-1]
trdfs = [r for r in train_rows[1] if r[-1] == 'train']
tsdfs = [r for r in train_rows[1] if r[-1] == 'test']
train_df.append(pd.DataFrame(columns=train_rows[0], data=trdfs))
test_df.append(pd.DataFrame(columns=train_rows[0], data=tsdfs))
train_df = pd.concat(train_df)
test_df = pd.concat(test_df)
logger.info("Training rows : {}".format(len(train_df)))
logger.info("Testing rows : {}".format(len(test_df)))
# prepare configs
all_config = {}
train_fl_name = self.assign_name(train_args, args.train_tasks)
all_config['train_task'] = {args.train_tasks: train_fl_name}
all_config['test_tasks'] = {}
test_fl_names = []
all_config['args'] = {}
all_config['args'][train_fl_name] = vars(train_args)
test_tasks = args.test_tasks.split(',')
test_dfs = []
for test_task in test_tasks:
train_args.data_type = 'test'
test_fl_name = self.assign_name(train_args,test_task)
all_config['args'][test_fl_name] = vars(train_args)
test_fl_names.append(test_fl_name)
test_dfs.append(test_df[test_df.task_name == 'task_'+test_task])
base_path = os.path.abspath(os.pardir)
# derive folder name as a random selection of characters
directory = ''
while True:
folder_name = 'data_{}'.format(str(uuid.uuid4())[:8])
directory = os.path.join(base_path, args.output_dir, folder_name)
if not os.path.exists(directory):
os.makedirs(directory)
break
train_df.to_csv(os.path.join(directory, train_fl_name))
for i,test_fl_name in enumerate(test_fl_names):
test_df = test_dfs[i]
test_df.to_csv(os.path.join(directory, test_fl_name))
# dump config
json.dump(all_config, open(os.path.join(directory, 'config.json'),'w'))
if args.store_full_puzzles:
# dump all puzzles
pkl.dump(all_puzzles, open(os.path.join(directory, 'puzzles.pkl'),'wb'), protocol=-1)
shutil.make_archive(directory, 'zip', directory)
logger.info("Created dataset in {}".format(directory))
self.analyze_data(directory)
if args.mturk:
self.keep_unique(directory)
def analyze_data(self, directory):
"""
Analyze a given directory
:param directory:
:return:
"""
all_files = glob.glob(os.path.join(directory,'*.csv'))
for fl in all_files:
logger.info("Analyzing file {}".format(fl))
df = pd.read_csv(fl)
df['word_len'] = df.story.apply(lambda x: len(word_tokenize(x)))
df['word_len_clean'] = df.clean_story.apply(lambda x: len(word_tokenize(x)))
print("Max words : ", df.word_len.max())
print("Min words : ", df.word_len.min())
print("For clean story : ")
print("Max words : ", df.word_len_clean.max())
print("Min words : ", df.word_len_clean.min())
logger.info("Analysis complete")
def keep_unique(self, directory, num=1):
"""
Keep num unique rows for each pattern. Handy for Mturk collection.
:param num:
:return:
"""
all_files = glob.glob(os.path.join(directory, '*.csv'))
for fl in all_files:
df = pd.read_csv(fl)
uniq_patterns = df['f_comb'].unique()
udf = []
for up in uniq_patterns:
# randomly select one row for this unique pattern
rd = df[df['f_comb'] == up].sample(num)
udf.append(rd)
udf = pd.concat(udf)
udf.to_csv(fl)
def _init_vars(self, args):
args.noise_support = False
args.noise_irrelevant = False
args.noise_disconnected = False
args.noise_attributes = False
args.memory = 0
return args
def task_1(self, args):
"""
Basic family relation without any noise
:return:
"""
args.output += '_task1'
return args
def task_2(self, args):
"""
Family relation with supporting facts
:return:
"""
args.noise_support = True
args.output += '_task2'
return args
def task_3(self, args):
"""
Family relation with irrelevant facts
:return:
"""
args.noise_irrelevant = True
args.output += '_task3'
return args
def task_4(self, args):
"""
Family relation with disconnected facts
:return:
"""
args.noise_disconnected = True
args.output += '_task4'
return args
def task_5(self, args):
"""
Family relation with all facts
:return:
"""
args.noise_support = True
args.noise_irrelevant = True
args.noise_disconnected = True
args.output += '_task5'
return args
def task_6(self, args):
"""
Family relation with only memory retrieval
:param args:
:return:
"""
args.memory = 1.0
args.output += '_task6'
return args
def task_7(self, args):
"""
Family relation with mixed memory and reasoning
:param args:
:return:
"""
args.memory = 0.5
args.output += '_task7'
args.noise_support = False
args.noise_irrelevant = False
args.noise_disconnected = False
return args
def setup(self):
"""
Download placeholders and update args
:return:
"""
placeholder_zip = "cleaned_placeholders.zip"
placeholder_url = download_url + placeholder_zip
base_path = os.path.abspath(os.pardir)
placeholder_loc = os.path.join(base_path, placeholder_zip)
if os.path.exists(placeholder_loc):
print("downloaded placeholder data exists")
else:
print("Downloading placeholder data")
r = requests.get(placeholder_url)
with open(placeholder_loc, 'wb') as f:
f.write(r.content)
# check shasum
sha256 = hashlib.sha256()
BUF_SIZE = 65536
with open(placeholder_loc, 'rb') as f:
while True:
data = f.read(BUF_SIZE)
if not data:
break
sha256.update(data)
print("sha256 : {}".format(sha256.hexdigest()))
print("checking ...")
if sha256.hexdigest() != SHA_SUM:
raise AssertionError("downloaded corrupt data, sha256 doesn't match")
print("Data valid")
# extract zip
with zipfile.ZipFile(placeholder_loc, "r") as zip_ref:
zip_ref.extractall(os.path.join(base_path, 'clutrr'))
# set args
self.args.template_file = "cleaned_placeholders/amt_placeholders_clean"
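def _sha256_of(path, buf_size=65536):
    # Editor's helper sketch (assumption, not part of the pipeline) mirroring
    # Clutrr.setup(): stream-hash a file so large downloads need not fit in memory.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(buf_size), b''):
            h.update(chunk)
    return h.hexdigest()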
if __name__ == '__main__':
args = get_args()
logger.info("Data generation started for configurations : ")
logger.info('\ntogrep : {0}\n'.format(sys.argv[1:]))
cl = Clutrr(args)
cl.run_task()
logger.info("\ntogrep : Data generation done {0}\n".format(sys.argv[1:]))
logger.info("-----------------------------------------------------")
|
clutrr-main
|
clutrr/main.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# Main Puzzle class which maintains the state of a single puzzle
import uuid
import random
from clutrr.utils.utils import comb_indexes
from clutrr.relations.templator import Templator
import copy
import networkx as nx
import matplotlib.pyplot as plt
import numpy as np
class Fact:
"""
Fact class to store the additional facts
"""
def __init__(self,
fact_type=None,
fact_edges=None):
"""
:param fact_type: Type of the fact, supporting / irrelevant / disconnected
:param fact_edges:
"""
self.fact_type = fact_type
self.fact_edges = fact_edges
def __str__(self):
if self.fact_edges:
return "Type: {}, E: {}".format(self.fact_type, self.fact_edges)
class Puzzle:
"""
Puzzle class containing the logic to build and maintain the state of a single puzzle
"""
def __init__(self,
id = None,
target_edge=None,
story=None,
proof=None,
query_edge=None,
ancestry=None,
relations_obj=None
):
"""
:param id: unique id of the puzzle
:param target_edge: the target edge, (node_a, node_b)
:param story: list of edges consisting of the story
:param proof: proof state of the resolution from target edge to story
:param query_edge: edge to query, usually the same as target_edge
:param ancestry: full background graph the story was derived from
:param relations_obj: store of the rule base of the relations
"""
if id is None:
self.id = str(uuid.uuid4())
else:
self.id = id
self.target_edge = target_edge
self.story = story
self.proof_trace = proof
self.facts = []
self.query_edge = query_edge
self.anc = ancestry
self.relations_obj = relations_obj
# derived values
self.query_text = None
self.target_edge_rel = None
self.story_rel = None
self.text_question = None
self.relation_comb = None
# derived full text story
self.full_text_story = None
# story edges with sorted node ids
self.story_sorted_ids = None
self.story_sort_dict = {} # mapping between the original node id and sorted node id
# the templator instances to use
self.train_templates = None
self.test_templates = None
def derive_vals(self):
self.query_text = self.format_edge(self.target_edge)
self.target_edge_rel = self.get_edge_relation(self.target_edge)
self.story_rel = [self.format_edge_rel(story) for story in self.story]
self.relation_comb = '-'.join([self.get_edge_rel(x)['rel'] for x in self.story])
def add_fact(self, fact_type, fact):
"""
Add a fact to the model
:param fact_type:
:param fact:
:return:
"""
self.facts.append(Fact(fact_type=fact_type, fact_edges=fact))
def clear_facts(self):
"""
Clear all noise facts of the puzzle
:return:
"""
self.facts = []
def get_full_story(self, randomize=True):
"""
Combine story and facts
:param randomize:
:return:
"""
full_story = self.story + [edge for fact in self.facts for edge in fact.fact_edges]
if randomize:
full_story = random.sample(full_story, len(full_story))
return full_story
def get_all_noise(self):
"""
Get only noise edges
:return:
"""
return [edge for fact in self.facts for edge in fact.fact_edges]
def get_clean_story(self):
"""
Return the clean story
:return:
"""
return self.story
def generate_text(self, stype='story', combination_length=1, templator:Templator=None, edges=None):
"""
:param stype: can be story, fact, target, or query
:param combination_length: the max length of combining the edges for text replacement
:param templator: templator class
:param edges: if provided, use these edges instead of stypes
:return:
"""
generated_rows = []
if edges is None:
if stype == 'story':
edges_to_convert = copy.copy(self.story)
elif stype == 'fact':
edges_to_convert = copy.copy([fact.fact_edges for fact in self.facts])
edges_to_convert = [y for x in edges_to_convert for y in x]
elif stype == 'target':
# derive the relation (solution) from the target edge
edges_to_convert = [copy.copy(self.target_edge)]
elif stype == 'query':
# derive the question from the target edge
edges_to_convert = [copy.copy(self.target_edge)]
else:
raise NotImplementedError("stype not implemented")
else:
edges_to_convert = edges
combined_edges = comb_indexes(edges_to_convert, combination_length)
for comb_group in combined_edges:
r_combs = ['-'.join([self.get_edge_relation(edge) for edge in edge_group])
for edge_group in comb_group]
# map back to the template spelling: the stored templates use the
# misspelling 'neice' rather than 'niece'
r_combs = [r.replace('niece','neice') if 'niece' in r else r for r in r_combs ]
r_entities = [[ent for edge in edge_group for ent in edge] for edge_group
in comb_group]
prows = [templator.replace_template(edge_group, r_entities[group_id])
for group_id, edge_group in enumerate(r_combs)]
# if contains None, then reject this combination
prc = [x for x in prows if x is not None]
if len(prc) == len(prows):
generated_rows.append(prows)
# select the generated row such that the priority of
# complex decomposition is higher. sort by length and choose the min
generated_rows = list(sorted(generated_rows, key=len))
generated_rows = [g for g in generated_rows if len(g) > 0]
if stype == 'story':
if len(generated_rows) == 0:
# assert
raise AssertionError()
if len(generated_rows) > 0:
generated_row = random.choice(generated_rows)
for g in generated_row:
if type(g) != str:
import ipdb; ipdb.set_trace()
return generated_row
else:
return []
def convert_node_ids(self, stype='story'):
"""
Given node ids in edges, change the ids into a sorted version
:param stype:
:return:
"""
if stype == 'story':
edges_tc = copy.copy(self.story)
elif stype == 'fact':
edges_tc = copy.copy([fact.fact_edges for fact in self.facts])
edges_tc = [y for x in edges_tc for y in x]
else:
raise NotImplementedError("stype not implemented")
node_ct = len(self.story_sort_dict)
for key in edges_tc:
if key[0] not in self.story_sort_dict:
self.story_sort_dict[key[0]] = node_ct
node_ct += 1
if key[1] not in self.story_sort_dict:
self.story_sort_dict[key[1]] = node_ct
node_ct += 1
def get_name_gender_string(self):
"""
Create a combination of name:Gender
:return:
"""
if self.story_sorted_ids is None:
self.convert_node_ids('story')
return ','.join(['{}:{}'.format(self.anc.family_data[node_id].name,
self.anc.family_data[node_id].gender)
for node_id in self.story_sort_dict.keys()])
def get_sorted_story_edges(self, stype='story'):
"""
Overlay changed node ids onto story edges
:param stype:
:return:
"""
if stype == 'story':
edges_tc = copy.copy(self.story)
elif stype == 'fact':
edges_tc = copy.copy([fact.fact_edges for fact in self.facts])
edges_tc = [y for x in edges_tc for y in x]
else:
raise NotImplementedError("stype not implemented")
edge_keys_changed_id = [(self.story_sort_dict[key[0]],
self.story_sort_dict[key[1]]) for key in edges_tc]
return edge_keys_changed_id
def get_story_relations(self, stype='story'):
if stype == 'story':
edges_tc = copy.copy(self.story)
elif stype == 'fact':
edges_tc = copy.copy([fact.fact_edges for fact in self.facts])
edges_tc = [y for x in edges_tc for y in x]
else:
raise NotImplementedError("stype not implemented")
return [self.get_edge_relation(p) for p in edges_tc]
def get_sorted_query_edge(self):
"""
Overlay changed node ids onto query edge
:return:
"""
return (self.story_sort_dict[self.target_edge[0]],
self.story_sort_dict[self.target_edge[1]])
def get_target_relation(self):
"""
Get target relation
:return:
"""
return self.get_edge_relation(self.target_edge)
def get_edge_rel(self, edge, rel_type='family'):
# get node attributes
node_b_attr = self.anc.family_data[edge[1]]
relation = self.anc.family[edge][rel_type]
edge_rel = self.relations_obj[relation][node_b_attr.gender]
return edge_rel
def get_edge_relation(self, edge, rel_type='family'):
node_b_attr = self.anc.family_data[edge[1]]
relation = self.anc.family[edge][rel_type]
edge_rel = self.relations_obj[relation][node_b_attr.gender]
return edge_rel['rel']
def format_edge(self, edge):
"""
Given an edge (x,y), format it into (name(x), name(y))
:param edge:
:return:
"""
node_a_attr = self.anc.family_data[edge[0]]
node_b_attr = self.anc.family_data[edge[1]]
new_edge = (node_a_attr.name, node_b_attr.name)
return new_edge
def format_edge_rel(self, edge, rel_type='family'):
"""
Given an edge (x,y), format it into (name(x), rel(x,y), name(y))
:param edge:
:return:
"""
node_a_attr = self.anc.family_data[edge[0]]
node_b_attr = self.anc.family_data[edge[1]]
edge_rel = self.get_edge_rel(edge, rel_type)['rel']
new_edge = (node_a_attr.name, edge_rel, node_b_attr.name)
return new_edge
def get_unique_relations(self):
"""
Get all unique relations from rule store
:return:
"""
rels = []
for meta_rel, val in self.relations_obj.items():
for sp_rel, sp_val in val.items():
rels.append(sp_val['rel'])
rels.remove('no-relation')
return rels
def display(self):
"""
Display the puzzle in a network diagram
:return:
"""
G = nx.MultiDiGraph()
fs = self.get_full_story()
names = {}
rels = {}
forward_edges = []
backward_edges = []
gendered_nodes = {'male':[], 'female':[]}
for edge in fs:
G.add_node(edge[0])
G.add_node(edge[1])
gendered_nodes[self.anc.family_data[edge[0]].gender].append(edge[0])
gendered_nodes[self.anc.family_data[edge[1]].gender].append(edge[1])
names[edge[0]] = self.anc.family_data[edge[0]].name
names[edge[1]] = self.anc.family_data[edge[1]].name
G.add_edge(edge[0], edge[1])
forward_edges.append(edge)
rels[edge] = self.get_edge_relation(edge)
G.add_edge(edge[1], edge[0])
backward_edges.append(edge)
rels[(edge[1], edge[0])] = self.get_edge_relation((edge[1], edge[0]))
pos = nx.layout.random_layout(G)
nx.draw_networkx_nodes(G, pos, nodelist=gendered_nodes['male'], node_color='b', node_size=100, alpha=0.8)
nx.draw_networkx_nodes(G, pos, nodelist=gendered_nodes['female'], node_color='r', node_size=100, alpha=0.8)
nx.draw_networkx_labels(G, pos, names)
nx.draw_networkx_edges(G, pos, edgelist=forward_edges, arrowstyle='-', edge_color='r')
nx.draw_networkx_edges(G, pos, edgelist=backward_edges, arrowstyle='-', edge_color='b')
edge_labels = nx.draw_networkx_edge_labels(G, pos, rels)
ax = plt.gca()
ax.set_axis_off()
plt.show()
def __str__(self):
tmp = "Story : \n"
tmp += "{} \n".format(self.story)
tmp += "{} \n".format([self.format_edge_rel(e) for e in self.story])
tmp += "Additional facts : \n"
for fact in self.facts:
tmp += "{} \n".format(fact)
return tmp
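# Minimal sketch (editor's addition): a Fact is just a typed bundle of edges.
if __name__ == '__main__':
    f = Fact(fact_type='supporting', fact_edges=[(0, 1), (1, 2)])
    print(f)  # -> Type: supporting, E: [(0, 1), (1, 2)]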
|
clutrr-main
|
clutrr/relations/puzzle.py
|
clutrr-main
|
clutrr/relations/__init__.py
|
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
import copy
import random
class Templator:
"""
Templator base class
"""
def __init__(self, templates, family):
self.templates = copy.copy(templates)
self.family = family # dict containing node information
self.used_template = ''
self.entity_id_dict = {}
self.seen_ent = set()
def choose_template(self, *args, **kwargs):
pass
def replace_template(self, *args, **kwargs):
pass
class TemplatorAMT(Templator):
"""
Replaces story with the templates obtained from AMT
"""
def __init__(self, templates, family):
super(TemplatorAMT, self).__init__(templates=templates, family=family)
def choose_template(self, f_comb, entities, verbose=False):
"""
Choose a template to use. Do not use the same template in this current context
:return:
"""
self.entity_id_dict = {}
self.seen_ent = set()
gender_comb = []
# build the dictionary of entity - ids
for ent in entities:
if ent not in self.seen_ent:
gender_comb.append(self.family[ent].gender)
self.seen_ent.add(ent)
self.entity_id_dict[ent] = len(self.entity_id_dict)
gender_comb = '-'.join(gender_comb)
if verbose:
print(f_comb)
print(gender_comb)
print(len(self.templates[f_comb][gender_comb]))
if gender_comb not in self.templates[f_comb] or len(self.templates[f_comb][gender_comb]) == 0:
raise NotImplementedError("template combination not found.")
available_templates = self.templates[f_comb][gender_comb]
chosen_template = random.choice(available_templates)
self.used_template = chosen_template
used_i = self.templates[f_comb][gender_comb].index(chosen_template)
# optionally drop the used template so it is not reused in this context:
# del self.templates[f_comb][gender_comb][used_i]
return chosen_template
def replace_template(self, f_comb, entities, verbose=False):
try:
chosen_template = self.choose_template(f_comb, entities, verbose=verbose)
for ent_id, ent in enumerate(list(set(entities))):
node = self.family[ent]
gender = node.gender
name = node.name
chosen_template = chosen_template.replace('ENT_{}_{}'.format(self.entity_id_dict[ent], gender), '[{}]'.format(name))
return chosen_template
except:
# chosen template not found
return None
class TemplatorSynthetic(Templator):
"""
Replaces story with the templates obtained from Synthetic rule base
Here, templates is self.relations_obj[relation]
"""
def __init__(self, templates, family):
super(TemplatorSynthetic, self).__init__(templates=templates, family=family)
def choose_template(self, f_comb, entities, verbose=False):
"""
Choose a template to use. Do not use the same template in this current context
:return:
"""
self.entity_id_dict = {}
self.seen_ent = set()
available_templates = self.templates[f_comb]
chosen_template = random.choice(available_templates)
self.used_template = chosen_template
return chosen_template
def replace_template(self, f_comb, entities, verbose=False):
assert len(entities) == 2
chosen_template = self.choose_template(f_comb, entities, verbose=verbose)
node_a_attr = self.family[entities[0]]
node_b_attr = self.family[entities[1]]
node_a_name = node_a_attr.name
node_b_name = node_b_attr.name
assert node_a_name != node_b_name
node_a_name = '[{}]'.format(node_a_name)
node_b_name = '[{}]'.format(node_b_name)
text = chosen_template.replace('e_1', node_a_name)
text = text.replace('e_2', node_b_name)
return text + '. '
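# Minimal usage sketch (editor's addition; the toy family and template are
# assumptions): TemplatorSynthetic fills e_1 / e_2 with bracketed names.
if __name__ == '__main__':
    from collections import namedtuple
    _Node = namedtuple('Node', ['name', 'gender'])
    family = {0: _Node('Alice', 'female'), 1: _Node('Bob', 'male')}
    t = TemplatorSynthetic(templates={'mother': ['e_1 is the mother of e_2']},
                           family=family)
    print(t.replace_template('mother', [0, 1]))
    # -> [Alice] is the mother of [Bob].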
|
clutrr-main
|
clutrr/relations/templator.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# New builder class which makes use of our new data generation
import random
import itertools as it
import copy
from clutrr.store.store import Store
import uuid
from clutrr.relations.puzzle import Puzzle
class RelationBuilder:
"""
Relation builder class
Steps:
- Accept a skeleton class
- Iteratively:
- Invert the relations
- Sample edge e (n1, n2)
- Select the rule which matches this edge e (n1,n2) -> r
- introduce a variable x so that (n1,x) + (x,n2) -> r
- find the x which satisfies both s.t. x =/= {n1, n2}
- either add to story
- or recurse
Changes:
- Relation types are "family", "work", etc. (as given in ``relation_types``)
- When applying the rules, make sure to confirm to these types
"""
def __init__(self,args, store:Store, anc):
self.anc = anc
self.args = args
self.rules = store.rules_store
self.store = store
self.comp_rules = self.rules['compositional']
self.inv_rules = self.rules['inverse-equivalence']
self.sym_rules = self.rules['symmetric']
self.eq_rules = self.rules['equivalence']
self.relation_types = self.rules['relation_types']
self.comp_rules_inv = self._invert_rule(self.rules['compositional'])
self.inv_rules_inv = self._invert_rule(self.rules['inverse-equivalence'])
self.sym_rules_inv = self._invert_rule(self.rules['symmetric'])
self.eq_rules_inv = self._invert_rule(self.rules['equivalence'])
self.relations_obj = store.relations_store
self.boundary = args.boundary
self.num_rel = args.relation_length
self.puzzles = {}
self.puzzle_ct = 0
self.expansions = {} # (a,b) : [list]
# save the edges which are used already
self.done_edges = set()
self.apply_almost_complete()
self.precompute_expansions(list(self.anc.family.keys()))
def _invert_rule(self, rule):
"""
Given a rule, invert it to be RHS:LHS
:param rule:
:return:
"""
inv_rules = {}
for tp, rules in rule.items():
inv_rules[tp] = {}
for key, val in rules.items():
if type(val) == str:
if val not in inv_rules[tp]:
inv_rules[tp][val] = []
inv_rules[tp][val].append(key)
else:
for k2, v2 in val.items():
if v2 not in inv_rules[tp]:
inv_rules[tp][v2] = []
inv_rules[tp][v2].append((key, k2))
return inv_rules
def invert_rel(self, rel_type='family'):
"""
Invert the relations
:return:
"""
if rel_type not in self.inv_rules:
return None
inv_family = copy.deepcopy(self.anc.family)
for edge, rel in self.anc.family.items():
relation = rel[rel_type]
if relation in self.inv_rules[rel_type]:
inv_rel = self.inv_rules[rel_type][relation]
if (edge[1], edge[0]) not in inv_family:
inv_family[(edge[1], edge[0])] = {}
inv_family[(edge[1], edge[0])][rel_type] = inv_rel
self.anc.family = inv_family
def equivalence_rel(self, rel_type='family'):
"""
Use equivalence relations
:return:
"""
if rel_type not in self.eq_rules:
return None
n_family = copy.deepcopy(self.anc.family)
for edge, rel in self.anc.family.items():
relation = rel[rel_type]
if relation in self.eq_rules[rel_type]:
eq_rel = self.eq_rules[rel_type][relation]
n_family[(edge[0],edge[1])][rel_type] = eq_rel
self.anc.family = n_family
def symmetry_rel(self, rel_type='family'):
"""
Apply symmetric relations
:return:
"""
if rel_type not in self.sym_rules:
return None
n_family = copy.deepcopy(self.anc.family)
for edge, rel in self.anc.family.items():
relation = rel[rel_type]
if relation in self.sym_rules[rel_type]:
sym_rel = self.sym_rules[rel_type][relation]
if (edge[1], edge[0]) not in n_family:
n_family[(edge[1], edge[0])] = {}
n_family[(edge[1], edge[0])][rel_type] = sym_rel
self.anc.family = n_family
def compose_rel(self, edge_1, edge_2, rel_type='family', verbose=False):
"""
Given an edge pair, compose the two edges into a single edge following the rules
in the dictionary
:param edge_1: (x,z)
:param edge_2: (z,y)
:param rel_type:
:return: (x,y)
"""
# don't allow self-edges
if edge_1[0] == edge_1[1]:
return None
if edge_2[0] == edge_2[1]:
return None
if edge_1[1] == edge_2[0] and edge_1[0] != edge_2[1]:
n_edge = (edge_1[0], edge_2[1])
if n_edge not in self.anc.family and \
(edge_1 in self.anc.family and
self.anc.family[edge_1][rel_type] in self.comp_rules[rel_type]):
if edge_2 in self.anc.family and \
self.anc.family[edge_2][rel_type] in self.comp_rules[rel_type][self.anc.family[edge_1][rel_type]]:
n_rel = self.comp_rules[rel_type][self.anc.family[edge_1][rel_type]][self.anc.family[edge_2][rel_type]]
if n_edge not in self.anc.family:
self.anc.family[n_edge] = {}
self.anc.family[n_edge][rel_type] = n_rel
if verbose:
print(edge_1, edge_2, n_rel)
return n_edge
return None
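# Worked sketch of compose_rel under the same toy rule table as above
# (hypothetical nodes a, b, c): with family[(a, b)] = {'family': 'child'} and
# family[(b, c)] = {'family': 'child'}, compose_rel((a, b), (b, c)) adds
# family[(a, c)] = {'family': 'grand'} and returns the new edge (a, c);
# it returns None for self-edges or when no compositional rule matches.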
def almost_complete(self,edge):
"""
Build an almost complete graph by iteratively applying the rules
Recursively apply rules and invert
:return:
"""
# apply symmetric, equivalence and inverse rules
self.invert_rel()
self.equivalence_rel()
self.symmetry_rel()
# apply compositional rules
keys = list(self.anc.family.keys())
edge_1 = [self.compose_rel(e, edge) for e in keys if e[1] == edge[0]]
edge_2 = [self.compose_rel(edge, e) for e in keys if e[0] == edge[1]]
edge_1 = list(filter(None.__ne__, edge_1))
edge_2 = list(filter(None.__ne__, edge_2))
for e in edge_1:
self.almost_complete(e)
for e in edge_2:
self.almost_complete(e)
def apply_almost_complete(self):
"""
For each edge apply ``almost_complete``
:return:
"""
print("Almost completing the family graph with {} nodes...".format(len(self.anc.family_data)))
for i in range(len(self.anc.family_data)):
for j in range(len(self.anc.family_data)):
if i != j:
self.almost_complete((i, j))
print("Initial family tree created with {} edges".format(
len(set([k for k, v in self.anc.family.items()]))))
def build(self):
"""
Build the stories and targets for the current family configuration
and save it in memory. These will be used later for post-processing
:param num_rel:
:return:
"""
available_edges = set(self.anc.family.keys()) - self.done_edges
#print("Available edges to derive backwards - {}".format(len(available_edges)))
for edge in available_edges:
pz = self.build_one_puzzle(edge)
if pz:
self.puzzles[pz.id] = pz
self.puzzle_ct += 1
if len(self.puzzles) == 0:
print("No puzzles could be generated with this current set of arguments. Consider increasing the family tree.")
return False
#print("Generated {}".format(len(self.puzzles)))
return True
def build_one_puzzle(self, edge):
"""
Build one puzzle
Return False if unable to make the puzzle
:return: type Puzzle
"""
story, proof_trace = self.derive([edge], k=self.num_rel - 1)
if len(story) == self.num_rel:
id = str(uuid.uuid4())
pz = Puzzle(id=id, target_edge=edge, story=story,
proof=proof_trace, ancestry=copy.deepcopy(self.anc),
relations_obj=copy.deepcopy(self.relations_obj))
pz.derive_vals()
return pz
else:
return False
def reset_puzzle(self):
"""Reset puzzle to none"""
self.puzzles = {}
self.puzzles_ct = 0
def unique_patterns(self):
"""Get unique patterns in this puzzle"""
f_comb_count = {}
for pid, puzzle in self.puzzles.items():
if puzzle.relation_comb not in f_comb_count:
f_comb_count[puzzle.relation_comb] = 0
f_comb_count[puzzle.relation_comb] += 1
return set(f_comb_count.keys())
def _value_counts(self):
pztype = {}
for pid, puzzle in self.puzzles.items():
f_comb = puzzle.relation_comb
if f_comb not in pztype:
pztype[f_comb] = []
pztype[f_comb].append(pid)
return pztype
def prune_puzzles(self, weight=None):
"""
In order to keep all puzzles homogeneously distributed over relation combinations ("f_comb"),
we calculate the count of each type of puzzle and retain the minimum count
:param weight: a dict of weights f_comb:p where 0 <= p <= 1
:return:
"""
pztype = self._value_counts()
pztype_min_count = min([len(v) for k,v in pztype.items()])
keep_puzzles = []
for f_comb, pids in pztype.items():
keep_puzzles.extend(random.sample(pids, pztype_min_count))
not_keep = set(self.puzzles.keys()) - set(keep_puzzles)
for pid in not_keep:
del self.puzzles[pid]
if weight:
pztype = self._value_counts()
# fill in missing weights
for f_comb, pids in pztype.items():
if f_comb not in weight:
weight[f_comb] = 1.0
keep_puzzles = []
for f_comb,pids in pztype.items():
if weight[f_comb] == 1.0:
keep_puzzles.extend(pids)
not_keep = set(self.puzzles.keys()) - set(keep_puzzles)
for pid in not_keep:
del self.puzzles[pid]
def add_facts_to_puzzle(self, puzzle):
"""
For a given puzzle, add different types of facts
- 1 : Provide supporting facts. After creating the essential fact graph, expand on any
k number of edges (randomly)
- 2: Irrelevant facts: after creating the relevant fact graph, expand on an edge,
but only provide dangling expansions
- 3: Disconnected facts: along with relevant facts, provide a tree which is completely
separate from the proof path
- 4: Random attributes: school, place of birth, etc.
If unable to add the required facts, return False
Else, return the puzzle
:return:
"""
if self.args.noise_support:
# Supporting facts
# A <-> B <-> C ==> A <-> D <-> C , A <-> D <-> B <-> C
story = puzzle.story
extra_story = []
for se in story:
e_pair = self.expand_new(se)
if e_pair:
if puzzle.target_edge not in e_pair and e_pair[0][1] not in set([p for e in puzzle.story for p in e]):
extra_story.append(tuple(e_pair))
if len(extra_story) == 0:
return False
else:
# choose a sample of 1 to k-1 edge pairs
num_edges = random.choice(range(1, (len(story) // 2) + 1))
extra_story = random.sample(extra_story, min(num_edges, len(extra_story)))
# untuple the extra stories
extra_story = [k for e in extra_story for k in e]
self._test_supporting(story, extra_story)
puzzle.add_fact(fact_type='supporting', fact=extra_story)
if self.args.noise_irrelevant:
# Irrelevant facts
# A <-> B <-> C ==> A <-> D <-> E
# Must have only one common node with the story
story = puzzle.story
num_edges = len(story)
sampled_edge = random.choice(story)
extra_story = []
for i in range(num_edges):
tmp = sampled_edge
seen_pairs = set()
pair = self.expand_new(sampled_edge)
if pair:
while len(extra_story) == 0 and (tuple(pair) not in seen_pairs):
seen_pairs.add(tuple(pair))
for e in pair:
if e != puzzle.target_edge and not self._subset(story, [e], k=2):
extra_story.append(e)
sampled_edge = e
break
if tmp == sampled_edge:
sampled_edge = random.choice(story)
if len(extra_story) == 0:
return False
else:
# add a length restriction so as to not create super long text:
# cap the number of irrelevant facts at half the story length
extra_story = random.sample(extra_story, min(len(extra_story), len(story) // 2))
self._test_irrelevant(story, extra_story)
puzzle.add_fact(fact_type='irrelevant', fact=extra_story)
if self.args.noise_disconnected:
# Disconnected facts
story = puzzle.story
nodes_story = set([y for x in list(story) for y in x])
nodes_not_in_story = set(self.anc.family_data.keys()) - nodes_story
possible_edges = [(x, y) for x, y in it.combinations(list(nodes_not_in_story), 2) if
(x, y) in self.anc.family]
num_edges = random.choice(range(1, (len(story) // 2) + 1))
possible_edges = random.sample(possible_edges, min(num_edges, len(possible_edges)))
if len(possible_edges) == 0:
return False
self._test_disconnected(story, possible_edges)
puzzle.add_fact(fact_type='disconnected', fact=possible_edges)
return puzzle
def add_facts(self):
"""
For a given puzzle, add different types of facts
- 1 : Provide supporting facts. After creating the essential fact graph, expand on any
k number of edges (randomly)
- 2: Irrelevant facts: after creating the relevant fact graph, expand on an edge,
but only provide dangling expansions
- 3: Disconnected facts: along with relevant facts, provide a tree which is completely
separate from the proof path
- 4: Random attributes: school, place of birth, etc.
If unable to add the required facts, return False
:return:
"""
mark_ids_for_deletion = []
for puzzle_id in self.puzzles.keys():
puzzle = self.add_facts_to_puzzle(self.puzzles[puzzle_id])
if puzzle:
self.puzzles[puzzle_id] = puzzle
else:
mark_ids_for_deletion.append(puzzle_id)
for id in mark_ids_for_deletion:
del self.puzzles[id]
def precompute_expansions(self, edge_list, tp='family'):
"""
Given a list of edges, precompute the one level expansions on all of them
Given (x,y) -> get (x,z), (z,y) s.t. it follows our set of rules
Store the expansions as a list : (x,y) : [[(x,a),(a,y)], [(x,b),(b,y)] ... ]
:param edge_list:
:return:
"""
for edge in edge_list:
relation = self.anc.family[edge][tp]
if relation not in self.comp_rules_inv[tp]:
continue
rules = list(self.comp_rules_inv[tp][relation])
for rule in rules:
for node in self.anc.family_data.keys():
e1 = (edge[0], node)
e2 = (node, edge[1])
if e1 in self.anc.family and self.anc.family[e1][tp] == rule[0] \
and e2 in self.anc.family and self.anc.family[e2][tp] == rule[1]:
new_edge_pair = [e1, e2]
if edge not in self.expansions:
self.expansions[edge] = []
self.expansions[edge].append(new_edge_pair)
self.expansions[edge] = it.cycle(self.expansions[edge])
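# Note: each expansion list is wrapped in itertools.cycle, so repeated calls to
# expand_new on the same edge walk round-robin through its precomputed splits
# instead of resampling. A hedged sketch with hypothetical nodes a, b, c, d:
# >>> rb.expansions[(a, c)] = it.cycle([[(a, b), (b, c)], [(a, d), (d, c)]])
# >>> rb.expand_new((a, c))  # -> [(a, b), (b, c)], then [(a, d), (d, c)], ...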
def expand_new(self, edge, tp='family'):
relation = self.anc.family[edge][tp]
if relation not in self.comp_rules_inv[tp]:
return None
if edge in self.expansions:
return self.expansions[edge].__next__()
else:
return None
def expand(self, edge, tp='family'):
"""
Given an edge, break the edge into two compositional edges from the given
family graph. Eg, if input is (x,y), break the edge into (x,z) and (z,y)
following the rules
:param edge: Edge to break
:param tp: relation type to follow while breaking the edge
:return: [e1, e2] if a valid split exists, else None
"""
relation = self.anc.family[edge][tp]
if relation not in self.comp_rules_inv[tp]:
return None
rules = list(self.comp_rules_inv[tp][relation])
while len(rules) > 0:
rule = random.choice(rules)
rules.remove(rule)
for node in self.anc.family_data.keys():
e1 = (edge[0], node)
e2 = (node, edge[1])
if e1 in self.anc.family and self.anc.family[e1][tp] == rule[0] \
and e2 in self.anc.family and self.anc.family[e2][tp] == rule[1]:
return [e1, e2]
return None
def derive(self, edge_list, k=3):
"""
Given a list of edges, expand elements from the edge until we reach k
:param edge_list:
:param k:
:return:
"""
proof_trace = []
seen = set()
while k>0:
if len(set(edge_list)) - len(seen) == 0:
break
if len(list(set(edge_list) - seen)) == 0:
break
e = random.choice(list(set(edge_list) - seen))
seen.add(e)
ex_e = self.expand_new(e)
if ex_e and (ex_e[0] not in seen and ex_e[1] not in seen and ex_e[0][::-1] not in seen and ex_e[1][::-1] not in seen):
pos = edge_list.index(e)
edge_list.insert(pos, ex_e[-1])
edge_list.insert(pos, ex_e[0])
edge_list.remove(e)
#edge_list.extend(ex_e)
# format proof into human readable form
e = self._format_edge_rel(e)
ex_e = [self._format_edge_rel(x) for x in ex_e]
proof_trace.append({e:ex_e})
k = k-1
return edge_list, proof_trace
def _get_edge_rel(self, edge, rel_type='family'):
# get node attributes
node_b_attr = self.anc.family_data[edge[1]]
relation = self.anc.family[edge][rel_type]
edge_rel = self.relations_obj[relation][node_b_attr.gender]
return edge_rel
def get_edge_relation(self, edge, rel_type='family'):
node_b_attr = self.anc.family_data[edge[1]]
relation = self.anc.family[edge][rel_type]
edge_rel = self.relations_obj[relation][node_b_attr.gender]
return edge_rel['rel']
def _format_edge(self, edge):
"""
Given an edge (x,y), format it into (name(x), name(y))
:param edge:
:return:
"""
node_a_attr = self.anc.family_data[edge[0]]
node_b_attr = self.anc.family_data[edge[1]]
new_edge = (node_a_attr.name, node_b_attr.name)
return new_edge
def _format_edge_rel(self, edge, rel_type='family'):
"""
Given an edge (x,y), format it into (name(x), rel(x,y), name(y))
:param edge:
:return:
"""
node_a_attr = self.anc.family_data[edge[0]]
node_b_attr = self.anc.family_data[edge[1]]
edge_rel = self._get_edge_rel(edge, rel_type)['rel']
new_edge = (node_a_attr.name, edge_rel, node_b_attr.name)
return new_edge
def stringify(self, edge, rel_type='family'):
"""
Build story string from the edge
:param edge: tuple
:return:
"""
# get node attributes
node_a_attr = self.anc.family_data[edge[0]]
node_b_attr = self.anc.family_data[edge[1]]
relation = self._get_edge_rel(edge, rel_type)
placeholders = relation['p']
placeholder = random.choice(placeholders)
node_a_name = node_a_attr.name
node_b_name = node_b_attr.name
assert node_a_name != node_b_name
if self.boundary:
node_a_name = '[{}]'.format(node_a_name)
node_b_name = '[{}]'.format(node_b_name)
text = placeholder.replace('e_1', node_a_name)
text = text.replace('e_2', node_b_name)
return text + '. '
def generate_puzzles(self, weight=None):
"""
Prune the puzzles according to weight
Deprecated: puzzle generation logic moved to `build`
:return:
"""
self.prune_puzzles(weight)
def generate_question(self, query):
"""
Given a query edge, generate a textual question from the question placeholder bank
Use args.question to either generate a relational question or a yes/no question
:param query:
:return:
"""
# TODO: return a question from the placeholder
# TODO: future work
return ''
def _flatten_tuples(self, story):
return list(sum(story, ()))
def _unique_nodes(self, story):
return set(self._flatten_tuples(story))
def _subset(self, story, fact, k=2):
"""
Whether at least k fact nodes are present in a given story
:param story:
:param fact:
:return:
"""
all_entities = self._unique_nodes(story)
all_fact_entities = self._unique_nodes(fact)
return len(all_entities.intersection(all_fact_entities)) >= k
## Testing modules
def _test_story(self, story):
"""
Given a list of edges of the story, test whether they are logically valid
(x,y),(y,z) is valid, (x,y),(x,z) is not
:param story: list of tuples
:return:
"""
for e_i in range(len(story) - 1):
assert story[e_i][-1] == story[e_i + 1][0]
def _test_disconnected(self, story, fact):
"""
Given a story and the fact, check whether the fact is a disconnected fact
If disconnected, then there would be no node match between story and fact
:param story: Array of tuples
:param fact: Array of tuples
:return:
"""
all_entities = self._unique_nodes(story)
all_fact_entities = self._unique_nodes(fact)
assert len(all_entities.intersection(all_fact_entities)) == 0
def _test_irrelevant(self, story, fact):
"""
Given a story and the fact, check whether the fact is an irrelevant fact
If irrelevant, then there would be exactly one node match between story and fact
:param story: Array of tuples
:param fact: Array of tuples
:return:
"""
all_entities = self._unique_nodes(story)
all_fact_entities = self._unique_nodes(fact)
assert len(all_entities.intersection(all_fact_entities)) == 1
def _test_supporting(self, story, fact):
"""
Given a story and the fact, check whether the fact is a supporting fact
If supporting, then there would be >= 2 node matches between story and fact
:param story: Array of tuples
:param fact: Array of tuples
:return:
"""
all_entities = self._unique_nodes(story)
all_fact_entities = self._unique_nodes(fact)
assert len(all_entities.intersection(all_fact_entities)) >= 2
|
clutrr-main
|
clutrr/relations/builder.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# File which was used in data collection from AMT using ParlAI-Mturk.
# Wrapper to communicate with backend database
# The database (Mongo) is used to maintain a set of collections
# - data we need to annotate : gold
# - dump for annotated data : mturk - this should also contain our manual tests
import os
from pymongo import MongoClient
from bson.objectid import ObjectId
import pandas as pd
import random
import glob
import schedule
import time
import datetime
import nltk
import subprocess
from numpy.random import choice
import argparse
KOUSTUV_ID = "A1W0QQF93UM08"
PORT = 27017
COLLECTION = 'amt_study'
GOLD_TABLE = 'gold'
MTURK_TABLE = 'mturk'
REVIEW_TABLE = 'review' # special table only used when we use review-only mode
USER_BASE = '/private/home/koustuvs/'
CLUTRR_BASE = USER_BASE + 'mlp/clutrr-2.0/'
SQLITE_BASE = CLUTRR_BASE + 'mturk/parlai/mturk/core/run_data/'
DRIVE_PATH = USER_BASE + 'Google Drive/clutrr/'
class DB:
def __init__(self, host='localhost', port=PORT, collection=COLLECTION, test_prob=0.0):
# initiate the db connection
self.client = MongoClient(host, port)
#print("Connected to backend MongoDB data at {}:{}".format(host, port))
self.gold = self.client[collection][GOLD_TABLE]
self.mturk = self.client[collection][MTURK_TABLE]
self.review = self.client[collection][REVIEW_TABLE]
self.test_prob = test_prob
self.test_worker = KOUSTUV_ID
def _read_csv(self, path):
assert path.endswith('.csv')
return pd.read_csv(path)
def upload(self, data_path, db='gold'):
"""
Given a csv file, upload the entire dataframe into the given db
:param data:
:param db:
:return:
"""
print("Reading {}".format(data_path))
data = self._read_csv(data_path)
records = data.to_dict(orient='records')
# add used counter if gold and test
# add reviewed counter if mturk
num_records = len(records)
print("Number of records found : {}".format(len(records)))
for rec in records:
if db == 'gold':
rec['used'] = 0
else:
rec['reviewed'] = 0
sents = nltk.sent_tokenize(rec['story'])
rec['relation_length'] = len(sents)
mdb = getattr(self, db)
# prune the records which are already present in the database
keep_idx = []
for rec_idx, rec in enumerate(records):
fd = mdb.find({'id': rec['id']}).count()
if fd == 0:
keep_idx.append(rec_idx)
records = [records[idx] for idx in keep_idx]
num_kept = len(records)
print("Number of records already in db : {}".format(num_records - num_kept))
if len(records) > 0:
r = mdb.insert_many(records)
print("Inserted {} records in db {}".format(len(records), db))
def update_gender(self, data_path):
"""
Update the genders
:param data_path:
:return:
"""
print("Reading {}".format(data_path))
data = self._read_csv(data_path)
for i, row in data.iterrows():
self.mturk.update_many({'gold_id': ObjectId(row['_id'])}, {"$set": {'genders': row['genders']}}, upsert=False)
print('Updated {} records'.format(len(data)))
def choose_relation(self):
# unused records
avg_used = list(self.gold.aggregate([{'$group': {'_id': '$relation_length', 'avg': {'$avg': '$used'}}}]))
# normalize
avg = [rel['avg'] for rel in avg_used]
relations = [rel['_id'] for rel in avg_used]
# don't serve relation 3 for the moment
#rel_idx = relations.index(3)
#del relations[rel_idx]
#del avg[rel_idx]
print("Found {} distinct relations".format(relations))
norm_avg = self._norm(avg)
# inverse the probability
delta = 0.01
norm_avg = [1 / i + delta for i in norm_avg]
norm_avg = self._norm(norm_avg)
rand_relation = int(choice(relations, 1, p=norm_avg)[0])
print("Choosing relation {}".format(rand_relation))
return rand_relation
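# Worked example of the inverse-frequency sampling above (hypothetical counts):
# if the average usage per relation_length is {2: 2.0, 3: 1.0}, then
# norm_avg = [2/3, 1/3]; inverting gives [1/(2/3) + 0.01, 1/(1/3) + 0.01] = [1.51, 3.01];
# renormalizing yields p ~ [0.33, 0.67], so the less-used relation length 3
# is served roughly twice as often.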
def get_gold(self, rand_relation=None):
"""
Find the gold record to annotate.
Rotation policy: first randomly choose a relation_length, then choose the least used
annotation
:return:
"""
if not rand_relation:
rand_relation = self.choose_relation()
print("Randomly choosing {}".format(rand_relation))
record = self.gold.find_one({'relation_length': rand_relation}, sort=[("used",1)])
return record
def get_gold_by_id(self, id=''):
"""
Get a specific gold record by id
:param id:
:return:
"""
try:
record = self.gold.find_one({'_id': ObjectId(id)})
except:
record = None
return record
def _norm(self, arr):
s = sum(arr)
return [r/s for r in arr]
def get_peer(self, worker_id='test', relation_length=2):
"""
Get an annotation which is not done by the current worker, and which isn't reviewed
Also, no need to choose relation of length 1
With some probability, choose our test records
:param worker_id:
:param relation_length:
:return: None if no suitable candidate found
"""
using_test = False
record = None
if relation_length == 1:
relation_length = random.choice([2,3])
print("Choosing records with test probability {}".format(self.test_prob))
if random.uniform(0,1) <= self.test_prob:
using_test = True
record_cursor = self.mturk.find({'worker_id': self.test_worker, 'relation_length': relation_length},
sort=[("used",1)])
print("Choosing a test record to annotate")
else:
record_cursor = self.mturk.find({'worker_id': {"$nin": [worker_id, self.test_worker]}, 'relation_length': relation_length, 'used':1})
print("Choosing a review record to annotate")
rec_found = False
if record_cursor.count() > 0:
rec_found = True
print("Found a record to annotate")
if not using_test and not rec_found:
# if no candidate peer is found, default to test
record_cursor = self.mturk.find({'worker_id': self.test_worker, 'relation_length': relation_length},
sort=[("used",1)])
print("No records found, reverting back to test")
if record_cursor.count() > 0:
record = random.choice(list(record_cursor))
if not record:
# did not find either candidate peer nor test, raise error
raise FileNotFoundError("no candidate found in db")
return record
def save_review(self, record, worker_id, rating=0.0):
"""
Save the review. If it's correct, then 1.0, else 0.0.
:param record:
:param rating:
:return:
"""
assert 'reviews' in record
assert 'reviewed_by' in record
record['used'] = len(record['reviewed_by']) + 1
record['reviewed_by'].append({worker_id: rating})
self.mturk.update_one({'_id': record['_id']}, {"$set": record}, upsert=False)
def save_annotation(self, record, worker_id):
""" Save the user annotation
"""
# reset annotation metadata for the new record
record['worker_id'] = worker_id
record['reviews'] = 0
record['reviewed_by'] = []
record['used'] = 0
# change the id
record['gold_id'] = record['_id']
del record['_id']
self.mturk.insert_one(record)
self.gold.update_one({'_id': record['gold_id']}, {'$inc': {'used': 1}}, upsert=False)
def done_review(self, worker_id, assignment_id, task_group_id):
"""
Record that a worker has completed a review
:param worker_id:
:return:
"""
self.review.insert_one({'worker_id':worker_id,
'assignment_id': assignment_id,
'task_group_id':task_group_id,
'accepted': ''})
def import_data(self):
path = CLUTRR_BASE + 'mturk_data/*'
print("Checking the path: {}".format(path))
files = glob.glob(path)
print("Files found : {}".format(len(files)))
for fl in files:
if fl.endswith('gold.csv'):
self.upload(fl, db='gold')
if fl.endswith('mturk.csv'):
self.upload(fl, db='mturk')
def export(self, base_path=CLUTRR_BASE, batch_size=100):
"""
Dump datasets into csv
:return:
"""
print("Exporting datasets ...")
gold = pd.DataFrame(list(self.gold.find()))
gold_path = os.path.join(base_path,"amt_gold.csv")
mturk_path = base_path
mturk = pd.DataFrame(list(self.mturk.find()))
print("Gold : {} records to {}".format(len(gold), gold_path))
print("Mturk : {} records to {}".format(len(mturk), mturk_path))
gold.to_csv(gold_path)
# save data in batches
mturk_splits = splitDataFrameIntoSmaller(mturk, chunkSize=batch_size)
for i, mturk_b in enumerate(mturk_splits):
mturk_b.to_csv(os.path.join(mturk_path, "amt_mturk_{}.csv".format(i)))
def export_mongodb(self, path=CLUTRR_BASE):
"""
Export the entire mongodb dump to location, preferably a google drive
:param path:
:return:
"""
print("Exporting local mongodb to {}".format(path))
command = "mongodump --db {} --out {} --gzip".format(COLLECTION, path)
res = subprocess.run(command.split(" "), stdout=subprocess.PIPE)
print(res)
def export_sqlite(self, path=CLUTRR_BASE, sqlite_path=SQLITE_BASE):
"""
Zip and export the sqlite database in sqlite path
:param path:
:return:
"""
print("Export local sqlite db to {}".format(path))
command = "zip -q -r {}/run_data.zip {}".format(path, sqlite_path)
res = subprocess.run(command.split(" "), stdout=subprocess.PIPE)
print(res)
def update_relation_length(self):
print("Updating...")
gold = self.gold.find({})
up = 0
for rec in gold:
rec['relation_length'] = len(nltk.sent_tokenize(rec['story']))
self.gold.update_one({'_id': rec['_id']}, {"$set": rec}, upsert=False)
up += 1
mturk = self.mturk.find({})
for rec in mturk:
rec['relation_length'] = len(nltk.sent_tokenize(rec['story']))
self.mturk.update_one({'_id': rec['_id']}, {"$set": rec}, upsert=False)
up += 1
print("Updated {} records".format(up))
def close_connections(self):
#print("Closing connection")
self.client.close()
def import_job():
data = DB(port=PORT)
data.import_data()
data.close_connections()
def export_job(folder, batch_size=100):
save_path = os.path.join(CLUTRR_BASE, folder)
if not os.path.exists(save_path):
os.mkdir(save_path)
data = DB(port=PORT)
data.export(base_path=save_path, batch_size=batch_size)
save_user_path = os.path.join(USER_BASE, folder)
if not os.path.exists(save_user_path):
os.mkdir(save_user_path)
data.export(base_path=save_user_path, batch_size=batch_size)
data.close_connections()
def backup_job():
data = DB(port=PORT)
data.export_mongodb()
data.export_sqlite()
def info_job():
data = DB(port=PORT)
print("Generating statistics at {}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
gold_c = data.gold.find({}).count()
pending_c = data.gold.count_documents({'used':0})
avg_used = list(data.gold.aggregate([{'$group': {'_id':None,'avg' : {'$avg' : '$used'}}}]))
if len(avg_used) > 0:
avg_used = avg_used[0]['avg']
mturk_c = data.mturk.count_documents({})
uniq_workers = len(data.mturk.find({}).distinct("worker_id"))
mturk_c_1 = data.mturk.count_documents({'relation_length':1})
gold_agg = list(data.gold.aggregate([{'$group': {'_id': {'relation_length': '$relation_length', 'f_comb': '$f_comb'},
'avg' : {'$avg' : '$used'}}}, {'$sort': {"_id.relation_length": 1}}]))
mturk_reviews = list(data.mturk.aggregate([{'$group': {'_id': None, 'total_rev': {'$sum': {'$size': '$reviewed_by'}}}}]))
for rec in gold_agg:
if rec['_id']['relation_length'] != 3:
print(rec['_id']['relation_length'], '\t', rec['_id']['f_comb'], '\t', rec['avg'])
mturk_c_2 = data.mturk.count_documents({'relation_length':2})
#gold_c_2_u = list(data.gold.aggregate([{'$group': {'_id':None,'relation_length':2, 'avg' : {'$avg' : '$used'}}}]))[0]['avg']
mturk_c_3 = data.mturk.count_documents({'relation_length':3})
#gold_c_3_u = list(data.gold.aggregate([{'$group': {'_id':None,'relation_length':3, 'avg' : {'$avg' : '$used'}}}]))[0]['avg']
print("Number of gold data : {} \n ".format(gold_c) +
"Number of pending rows to annotate : {} \n ".format(pending_c) +
"Average times each gold row has been used : {} \n ".format(avg_used) +
"Number of annotations given : {} \n".format(mturk_c) +
"Unique workers : {}\n".format(uniq_workers) +
"Number of 1 relations annotated : {}\n".format(mturk_c_1) +
"Number of 2 relations annotated : {}\n".format(mturk_c_2) +
"Number of 3 relations annotated : {}\n".format(mturk_c_3) +
"Total reviews provided : {}\n".format(mturk_reviews[0]['total_rev']))
def update_genders():
data = DB(port=PORT)
data.update_gender('/private/home/koustuvs/mlp/clutrr-2.0/amt_gold_gender.csv')
data.close_connections()
def test_get_gold(k=100):
data = DB(port=PORT)
rel_chosen = {1:0,2:0,3:0}
for i in range(k):
record = data.get_gold()
rel_chosen[record['relation_length']] +=1
print(rel_chosen)
data.close_connections()
def splitDataFrameIntoSmaller(df, chunkSize=10000):
listOfDf = list()
# ceil division: avoids an empty trailing chunk when len(df) is a multiple of chunkSize
numberChunks = (len(df) + chunkSize - 1) // chunkSize
for i in range(numberChunks):
listOfDf.append(df[i*chunkSize:(i+1)*chunkSize])
return listOfDf
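# Sketch: with the ceil division above, a 250-row dataframe and chunkSize=100
# yields chunks of 100, 100 and 50 rows, and a 200-row dataframe yields exactly
# two chunks rather than two plus an empty trailing one.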
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# graph parameters
parser.add_argument("--server", action='store_true', help="start the server")
parser.add_argument("--import_db", default='', type=str, help="Import the files to server")
parser.add_argument("--batch_size", default=100, type=int, help="Export batch size")
parser.add_argument("--schedule_interval", default=10, type=int, help="schedule interval minutes")
parser.add_argument("--save_folder", default='amt_annotated_data', type=str, help="data location")
args = parser.parse_args()
if len(args.import_db) > 0:
import_job()
if args.server:
export_job(args.save_folder, batch_size=args.batch_size)
info_job()
#backup_job()
print("Scheduling jobs...")
schedule.every(args.schedule_interval).minutes.do(export_job, args.save_folder, batch_size=args.batch_size)
schedule.every(args.schedule_interval).minutes.do(info_job)
# redundant backups
schedule.every().day.at("23:00").do(backup_job)
while True:
schedule.run_pending()
time.sleep(1)
|
clutrr-main
|
clutrr/utils/data_backend.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# Split the test files into their own task specific files
# Not required in actual data generation
import pandas as pd
import os
import glob
import json
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# graph parameters
parser.add_argument("--data_folder", default='data_emnlp', type=str, help="data folder")
args = parser.parse_args()
base_path = os.path.abspath(os.path.join(os.pardir, os.pardir))
print(base_path)
# search for directories
dirs = glob.glob(os.path.join(base_path, args.data_folder, '*'))
dirs = [dir for dir in dirs if os.path.isdir(dir)]
print("Found {} directories".format(len(dirs)))
print(dirs)
for folder in dirs:
# read config file
config = json.load(open(os.path.join(folder, 'config.json')))
# get test_file
test_files = glob.glob(os.path.join(folder, '*_test.csv'))
# get splittable test files (multiple task names joined by commas in the filename)
test_files = [t for t in test_files if len(t.split(',')) > 1]
for test_file in test_files:
df = pd.read_csv(test_file)
test_fl_name = test_file.split('/')[-1]
tasks = df.task_name.unique()
for task in tasks:
dft = df[df.task_name == task]
tname = task.split('task_')[-1]
flname = tname + '_test.csv'
dft.to_csv(os.path.join(folder, flname))
config['args'][flname] = config['args'][test_fl_name]
config['test_tasks'][tname] = test_fl_name
del config['args'][test_fl_name]
json.dump(config, open(os.path.join(folder, 'config.json'),'w'))
# backup the original test_files
for test_file in test_files:
os.rename(test_file, test_file.replace('_test','_backupt'))
print("splitting done")
|
clutrr-main
|
clutrr/utils/test_splitter.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
# file to create and maintain an index.html file which will contain a table of datasets for easy maintenance
import glob
import json
import os
import requests
import datetime
import pandas as pd
import argparse
template_header = '''
<html><head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimal-ui">
<title>CLUTRR Dataset List</title>
<link rel="stylesheet" href="style.css">
<style>
body {
box-sizing: border-box;
min-width: 200px;
max-width: 980px;
margin: 0 auto;
padding: 45px;
}
</style>
</head>
<body>
<article class="markdown-body">
<h1><a id="user-content-github-markdown-css-demo" class="anchor" href="#github-markdown-css-demo" aria-hidden="true"><span class="octicon octicon-link"></span></a>CLUTRR v2.0 Dataset List</h1>
<p><a name="user-content-headers"></a></p><a name="user-content-headers">
</a>
<p>Contains the list of datasets and their generation configuration.</p>
<table><thead>
<tr>
<th>Dataset name</th>
<th>Name</th>
<th align="center">Training</th>
<th aligh="right">Number of Training rows</th>
<th align="right">Testing</th>
<th align="right">Number of Testing rows</th>
<th align="right">Time created</th>
<th align="right">Holdout</th>
</tr>
</thead><tbody>
'''
template_footer = '''
</tbody></table>
<p>For questions, contact Koustuv Sinha. A csv of this table is <a href="{}">available here.</a></p>
</article>
</body></html>
'''
#
CSS_TEMPLATE = 'https://sindresorhus.com/github-markdown-css/github-markdown.css'
def generate_webpage(data_path):
"""
Reads the list of directories, reads their config file, and generates a Github flavored webpage
<tr>
<td></td>
<td></td>
<td></td>
</tr>
:return:
"""
folders = glob.glob(os.path.join(data_path, '*', ''))
print("Found {} folders.".format(len(folders)))
web_page = template_header
generated_at = '<p>This webpage is autogenerated at {}</p>'.format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M"))
data_names = []
unames = []
train = []
test = []
num_train = []
num_test = []
times = []
holdouts = []
for folder in folders:
print('Reading {}'.format(folder))
config = json.load(open(os.path.join(folder, 'config.json')))
train_task = config['train_task'].keys()
test_tasks = config['test_tasks'].keys()
train_rows = sum([config['args'][config['train_task'][tr]]['num_rows'] for tr in train_task])
test_rows = sum([config['args'][config['test_tasks'][tr]]['num_rows'] for tr in test_tasks])
one_tt = list(train_task)[0]
name = folder.split('/')[-2]
name_url = '<a href={}>{}</a>'.format(name + '.zip', name)
gen_time = datetime.datetime.fromtimestamp(os.stat(folder).st_mtime).strftime("%y-%m-%d / %H:%M")
holdout = ','.join([config['args'][config['train_task'][tr]]['holdout'] if 'holdout' in config['args'][config['train_task'][tr]] else 'None' for tr in train_task])
data_names.append(config['args'][config['train_task'][one_tt]]['data_name'])
unames.append(name_url)
train.append(','.join(train_task))
num_train.append(train_rows)
num_test.append(test_rows)
test.append(','.join(test_tasks))
times.append(gen_time)
holdouts.append(holdout)
df = pd.DataFrame(data={'data_name': data_names, 'unames': unames, 'train': train, 'test':test, 'num_train':num_train, 'num_test':num_test, 'times':times, 'holdout':holdouts})
df.sort_values(by=['times'], inplace=True)
data_csv = os.path.join(data_path, 'dataset_details.csv')
df.to_csv(data_csv)
for i,row in df.iterrows():
row_web = '<tr><td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td></tr>'.format(
row['data_name'], row['unames'], row['train'], row['num_train'], row['test'], row['num_test'], row['times'], row['holdout'])
web_page += row_web
web_page += generated_at
web_page += template_footer.format('dataset_details.csv')
css = requests.get(CSS_TEMPLATE).text
with open(os.path.join(data_path, 'style.css'), 'w') as fp:
fp.write(css)
with open(os.path.join(data_path, 'index.html'), 'w') as fp:
fp.write(web_page)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--output_dir", type=str, default="/home/ml/ksinha4/clutrr/data", help="output_dir")
args = parser.parse_args()
generate_webpage(args.output_dir)
|
clutrr-main
|
clutrr/utils/web.py
|
clutrr-main
|
clutrr/utils/__init__.py
|
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
import itertools as it
import numpy as np
import csv
import pandas as pd
import random
def pairwise(iterable):
"""
Recipe from itertools
:param iterable:
:return: "s -> (s0,s1), (s1,s2), (s2, s3), ..."
"""
a, b = it.tee(iterable)
next(b, None)
return zip(a, b)
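# Example usage of the recipe above:
# >>> list(pairwise([1, 2, 3, 4]))
# [(1, 2), (2, 3), (3, 4)]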
def prob_dist(rows):
row_dict = {}
for row in rows:
if row[-1] not in row_dict:
row_dict[row[-1]] = []
row_dict[row[-1]].append(row[:2])
rel_probs = {k: (len(v) / len(rows)) for k, v in row_dict.items()}
return rel_probs
def split_train_test(args, rows):
# split training testing
r1 = prob_dist(rows)
indices = range(len(rows))
mask_i = np.random.choice(indices,
int(len(indices) * args.train_test_split),
replace=False)
test_indices = [i for i in indices if i not in set(mask_i)]
train_indices = [i for i in indices if i in set(mask_i)]
train_rows = [rows[ti] for ti in train_indices]
r_train = prob_dist(train_rows)
test_rows = [rows[ti] for ti in test_indices]
r_test = prob_dist(test_rows)
train_rows = [row[:-1] for row in train_rows]
test_rows = [row[:-1] for row in test_rows]
return train_rows, test_rows
def write2file(args, rows, filename):
with open(filename, 'w') as fp:
for argi in vars(args):
fp.write('# {} {}\n'.format(argi, getattr(args, argi)))
writer = csv.writer(fp)
writer.writerow(['story','summary'])
for row in rows:
writer.writerow(row)
def sanity_check(filename, rows):
## sanity check
df = pd.read_csv(filename, skip_blank_lines=True, comment='#')
print('Total rows : {}'.format(len(df)))
assert len(rows) == len(df)
class CDS:
def combinationSum(self, candidates, target):
res = []
candidates.sort()
self.dfs(candidates, target, 0, [], res)
return res
def dfs(self, nums, target, index, path, res):
if target < 0:
return # backtracking
if target == 0:
res.append(path)
return
for i in range(index, len(nums)):
self.dfs(nums, target - nums[i], i, path + [nums[i]], res)
class unique_element:
def __init__(self, value, occurrences):
self.value = value
self.occurrences = occurrences
def perm_unique(elements):
eset = set(elements)
listunique = [unique_element(i, elements.count(i)) for i in eset]
u = len(elements)
return perm_unique_helper(listunique, [0] * u, u - 1)
def perm_unique_helper(listunique, result_list, d):
if d < 0:
yield tuple(result_list)
else:
for i in listunique:
if i.occurrences > 0:
result_list[d] = i.value
i.occurrences -= 1
for g in perm_unique_helper(listunique, result_list, d - 1):
yield g
i.occurrences += 1
def comb_indexes(sn, max_seq_len=3):
"""
Idea here is to generate all combinations maintaining the order
Eg, [a,b,c,d] => [[a],[b],[c],[d]], [[a,b],[c],[d]], [[a,b,c],[d]], etc ...
where the max sequence is max_seq_len
:param sn:
:param max_seq_len:
:return:
"""
s_n = len(sn)
cd = CDS()
some_comb = cd.combinationSum(list(range(1,max_seq_len+1)),s_n)
all_comb = [list(perm_unique(x)) for x in some_comb]
all_comb = [y for r in all_comb for y in r]
pairs = []
for pt in all_comb:
rsa = []
stt = 0
for yt in pt:
rsa.append(sn[stt:stt+yt])
stt += yt
pairs.append(rsa)
return pairs
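# Worked example: comb_indexes(['a', 'b', 'c'], max_seq_len=2) yields the ordered
# segmentations [['a'], ['b'], ['c']], [['a'], ['b', 'c']] and [['a', 'b'], ['c']]:
# every way of cutting the sequence into chunks of length <= 2, order preserved.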
def choose_random_subsequence(sn, max_seq_len=3):
return random.choice(comb_indexes(sn, max_seq_len))
|
clutrr-main
|
clutrr/utils/utils.py
|
clutrr-main
|
clutrr/actors/__init__.py
|
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
import numpy as np
import names
import copy
import random
from clutrr.actors.actor import Actor, Entity
from clutrr.store.store import Store
#store = Store()
class Ancestry:
"""
Ancestry of people to simulate
Class to create a skeleton graph
Changes:
- Maintain a dictionary instead of networkx graph.
- The keys to the dictionary will be (node_id_x, node_id_y) : a dict of relations
- a dict of relations keeps family, work, etc. relation types logically separate
- key of the relations:
- "family" --> family type relations
- "work" --> work related relations
- Maintain a separate dictionary for mapping of node_id to details
- Relation keyword to be taken from rules_store
"""
def __init__(self, args, store:Store,
relationship_type={'SO':1,'child':2}, taken_names=None):
self.family = {} # dict (node_id_a, node_id_b) : rel dict
self.family_data = {} # dict to hold node_id details
self.work_data = {} # dict to hold work location id details
self.store = store
self.max_levels = args.max_levels
self.min_child = args.min_child
self.max_child = args.max_child
self.p_marry = args.p_marry
self.relationship_type = relationship_type
self.levels = 0 # keep track of the levels
self.node_ct = 0
self.flipped = [] # track of nodes which are gender flipped
self.taken_names = taken_names if taken_names else copy.deepcopy(self.store.attr_names) # keep track of names which are already taken
self.simulate()
#self.add_work_relations()
def simulate(self):
"""
Main function to run the simulation to create a family tree
:return:
"""
self.node_ct = 0
self.levels = random.randint(1,self.max_levels)
# we are root, for now just add one head of family
gender = 'male'
nodes = self.add_members(gender=gender, num=1)
parents = nodes
for level in range(self.max_levels):
# build generation
generation_nodes = []
for node in parents:
# marry with probability p_marry
decision_marry = np.random.choice([True,False],1,p=[self.p_marry, 1-self.p_marry])
if decision_marry:
# add the partner
nodes = self.add_members(gender=self.toggle_gender(node), num=1)
self.make_relation(node, nodes[0], relation='SO')
# always leave the last level as single children
if level != self.max_levels - 1:
# add the children for this parent
num_childs = random.randint(self.min_child, self.max_child)
child_nodes = self.add_members(num=num_childs)
if len(child_nodes) > 0:
for ch_node in child_nodes:
self.make_relation(node, ch_node, relation='child')
self.make_relation(nodes[0], ch_node, relation='child')
generation_nodes.extend(child_nodes)
parents = generation_nodes
def add_members(self, gender='male', num=1):
"""
Add members into family
:param gender: male/female. if num > 1 then randomize
:param num: default 1.
:return: list of node ids added, new node id
"""
node_id = self.node_ct
added_nodes = []
for x in range(num):
if num > 1:
gender = random.choice(['male', 'female'])
# select a name that is not taken
name = names.get_first_name(gender=gender)
while name in self.taken_names:
name = names.get_first_name(gender=gender)
self.taken_names.add(name)
node = Actor(
name=name, gender=gender, node_id=node_id, store=self.store)
added_nodes.append(node)
self.family_data[node_id] = node
node_id += 1
self.node_ct = node_id
return added_nodes
def make_relation(self, node_a, node_b, relation='SO'):
"""
Add a relation between two nodes
:param node_a: integer id of the node
:param node_b: integer id of the node
:param relation: either SO->1, or child->2
:return:
"""
node_a_id = node_a.node_id
node_b_id = node_b.node_id
rel_tuple = (node_a_id, node_b_id)
if rel_tuple not in self.family:
self.family[rel_tuple] = {'family': relation}
def toggle_gender(self, node):
if node.gender == 'male':
return 'female'
else:
return 'male'
def print_family(self):
ps = ','.join(["{}.{}.{}".format(k, v.name[0], v.gender) for k,v in self.family_data.items()])
return ps
def next_flip(self):
"""
Given an ancestry,
- maintain a set of nodes who have already been gender flipped
- sample one node to flip from the rest
- check if the node contains a SO relationship. if so, toggle both
- add the flipped nodes into the already flipped pile
- if no nodes are left, then return False. else return True
:return:
"""
candidates = list(set(self.family_data.keys()) - set(self.flipped))
if len(candidates) == 0:
# all candidates flipped already
# reset flip
self.flipped = []
else:
node = random.choice(candidates)
relations_with_node = [node_pair for node_pair in self.family.keys() if node_pair[0] == node]
SO_relation = [node_pair for node_pair in relations_with_node if self.family[node_pair]['family'] == 'SO']
assert len(SO_relation) <= 1
if len(SO_relation) == 1:
so_node = SO_relation[0][1]
# flip both
self.family_data[node].gender = self.toggle_gender(self.family_data[node])
self.family_data[so_node].gender = self.toggle_gender(self.family_data[so_node])
# exchange their names too
tmp_name = self.family_data[node].name
self.family_data[node].name = self.family_data[so_node].name
self.family_data[so_node].name = tmp_name
self.flipped.append(node)
self.flipped.append(so_node)
#print("flipping couples ...")
#print("Flipped {} to {}".format(node, self.family_data[node].gender))
#print("Flipped {} to {}".format(so_node, self.family_data[so_node].gender))
else:
# only children, flip them
self.family_data[node].gender = self.toggle_gender(self.family_data[node])
# choose a new gender-appropriate name that is not already taken
gender = self.family_data[node].gender
name = names.get_first_name(gender=gender)
while name in self.taken_names:
name = names.get_first_name(gender=gender)
self.taken_names.add(name)
self.family_data[node].name = name
self.flipped.append(node)
#print("flipping singles ...")
#print("Flipped {} to {}".format(node, self.family_data[node].gender))
def add_work_relations(self, w=0.3):
"""
Policy of adding working relations:
- Add w work locations
- Divide the population into these w bins
- Add works_at relation
- Within each bin:
- Assign m managers
:return:
"""
num_pop = len(self.family_data)
pop_ids = list(self.family_data.keys())  # random.sample needs a sequence
work_locations = random.sample(self.store.attribute_store['work']['options'], int(num_pop * w))
node_ct = self.node_ct
work_bins = {}
pop_per_loc = num_pop // len(work_locations)
for wl in work_locations:
self.work_data[node_ct] = Entity(name=wl, etype='work')
w = random.sample(pop_ids, pop_per_loc)
pop_ids = list(set(pop_ids) - set(w))
work_bins[wl] = {"id": node_ct, "w": w}
node_ct+=1
if len(pop_ids) > 0:
work_bins[work_locations[-1]]["w"].extend(pop_ids)
self.node_ct = node_ct
for wl in work_locations:
e_id = work_bins[wl]["id"]
pops = work_bins[wl]["w"]
for p in pops:
edge = (e_id, p)
if edge not in self.family:
self.family[edge] = {'family':'', 'work': []}
if 'work' not in self.family[edge]:
self.family[edge]['work'] = []
self.family[edge]['work'].append('works_at')
# select manager
manager = random.choice(pops)
for p in pops:
edge = (p, manager)
if edge not in self.family:
self.family[edge] = {'family':'', 'work': []}
if 'work' not in self.family[edge]:
self.family[edge]['work'] = []
self.family[edge]['work'].append('manager')
if __name__=='__main__':
#pdb.set_trace()
anc = Ancestry()
anc.add_work_relations()
|
clutrr-main
|
clutrr/actors/ancestry.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
import random
class Actor:
"""
male or female actor
"""
def __init__(self, gender='male', name='', node_id=0, store={}):
self.gender = gender
self.name = name
self.node_id = node_id
## irrelevant attributes
## also make the irrelevant attributes random. Not every entity will have them all
self.attributes = {
'school' : '',
'location_born' : '',
'social_media_active' : False,
'social_media_preferred': '',
'political_views' : '',
'hobby' : '',
'sport': '',
}
self.attribute_store = store.attribute_store
self.fill_attributes()
def fill_attributes(self):
for key,val in self.attribute_store.items():
random_val = random.choice(val['options'])
random_attr = '[{}]'.format(random_val)
name = '[{}]'.format(self.name)
random_placeholder = random.choice(val['placeholders'])
text = random_placeholder.replace('e_x', name).replace('attr_x', random_attr) + ". "
self.attributes[key] = text
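# Sketch of the substitution above (hypothetical store entry): a placeholder
# 'e_x was born in attr_x' with name 'Alice' and option 'Paris' becomes
# '[Alice] was born in [Paris]. '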
def __repr__(self):
return "<Actor name:{} gender:{} node_id:{}".format(
self.name, self.gender, self.node_id)
def __str__(self):
return "Actor node, name: {}, gender : {}, node_id : {}".format(
self.name, self.gender, self.node_id
)
class Entity:
"""
work or related entities
etype="work"
"""
def __init__(self, name='', etype='', node_id=0):
self.name = name
self.etype = etype
self.node_id = node_id
def __repr__(self):
return "<Entity name:{} etype: {} node_id:{}".format(
self.name, self.etype, self.node_id)
def __str__(self):
return "Entity node, name: {}, etype: {}, node_id : {}".format(
self.name, self.etype, self.node_id
)
|
clutrr-main
|
clutrr/actors/actor.py
|
"""
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
import os
import json
import yaml
class Store:
def __init__(self,args):
attribute_store = args.attribute_store if args.attribute_store else 'attribute_store.json'
relations_store = args.relations_store if args.relations_store else 'relations_store.json'
question_store = args.question_store if args.question_store else 'question_store.json'
rules_store = args.rules_store if args.rules_store else 'rules_store.yaml'
self.base_path = os.path.dirname(os.path.realpath(__file__)).split('store')[0]
self.attribute_store = json.load(open(os.path.join(self.base_path, 'store', attribute_store)))
self.relations_store = yaml.safe_load(open(os.path.join(self.base_path, 'store', relations_store)))
self.question_store = yaml.safe_load(open(os.path.join(self.base_path, 'store', question_store)))
self.rules_store = yaml.safe_load(open(os.path.join(self.base_path, 'store', rules_store)))
# TODO: do we need this?
## Relationship type has basic values 0,1 and 2, whereas the
## rest should be inferred. Like, child + child = 4 = grand
self.relationship_type = {
'SO': 1,
'child': 2,
'sibling': 0,
'in-laws': 3,
'grand': 4,
'no-relation': -1
}
attr_names = [v["options"] for k,v in self.attribute_store.items()]
self.attr_names = set([x for p in attr_names for x in p])
|
clutrr-main
|
clutrr/store/store.py
|
clutrr-main
|
clutrr/store/__init__.py
|
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
import subprocess
from subprocess import check_output
import os
import embeddings
class VecMap:
"""
wrapper for vecmap https://github.com/artetxem/vecmap
assumes vecmap is in the directory ./vecmap
"""
def __init__(self, srcvec, tgtvec, dictpath, outdir, config):
self.srcvec = srcvec
self.tgtvec = tgtvec
self.outdir = outdir
self.dictpath = dictpath
self.flags = ''
self.config = config
def add_flag(v):
self.flags += ' ' + v
add_flag('--verbose')
add_flag('--orthogonal')
# default is 50, but I want faster
add_flag('--stochastic_interval 3')
add_flag('--csls 10')
if dictpath is not None:
add_flag(f'--validation {dictpath}')
logdir = os.path.join(self.outdir, 'vecmap.log')
add_flag(f'--log {logdir}')
if config.supervision == 'identical':
add_flag('--identical')
elif config.supervision == 'init_identical':
add_flag('--init_identical')
elif config.supervision == 'numeral':
add_flag('--init_numeral')
elif config.supervision == 'unsupervised':
add_flag('--init_unsupervised')
add_flag('--unsupervised')
else:
raise Exception('invalid type of supervision: ' + config.supervision)
# if config.init_dict:
# add_flag(f'--dictionary {config.init_dict}')
def run(self):
srcvec = self.srcvec
tgtvec = self.tgtvec
srcvec_out = os.path.join(self.outdir, 'src.out.vec')
tgtvec_out = os.path.join(self.outdir, 'tgt.out.vec')
cmd = f'python vecmap/map_embeddings.py {srcvec} {tgtvec} {srcvec_out} {tgtvec_out} {self.flags}'  # --verbose is already in self.flags
print(cmd)
process = subprocess.Popen(cmd.split())
output, error = process.communicate()
def vecs(self):
srcvec_out = os.path.join(self.outdir, 'src.out.vec')
tgtvec_out = os.path.join(self.outdir, 'tgt.out.vec')
srcfile = open(srcvec_out, encoding='utf-8', errors='surrogateescape')
tgtfile = open(tgtvec_out, encoding='utf-8', errors='surrogateescape')
src_words, srcvec = embeddings.read(srcfile)
tgt_words, tgtvec = embeddings.read(tgtfile)
return srcvec, tgtvec
def eval(self, dictpath):
srcvec_out = os.path.join(self.outdir, 'src.out.vec')
tgtvec_out = os.path.join(self.outdir, 'tgt.out.vec')
cmd = f'python vecmap/eval_translation.py {srcvec_out} {tgtvec_out} -d {dictpath} --retrieval csls -k 10'
print(cmd)
out = check_output(cmd.split())
def cov_acc(sout):
toks = sout.decode('utf8').replace(': ', ' ').replace(':', ' ').split()
print(sout)
cov = float(toks[1].strip('%'))
acc = float(toks[3].strip('%'))
return ({'accuracy': acc, 'coverage': cov})
return cov_acc(out)
|
coocmap-main
|
baselines.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
from typing import Optional
import collections
import numpy as np
import pandas as pd
from tokenizers import Tokenizer
# faithfully recreate the protocol of vecmap with minimal code modifications
def vecmap_evaluate(sim: np.ndarray, tokenizer1: Tokenizer, tokenizer2: Tokenizer, refpath: str):
# https://github.com/artetxem/vecmap/blob/master/map_embeddings.py#L225
# precision only, count oovs
with open(refpath, encoding='utf-8', errors='surrogateescape') as f:
validation = collections.defaultdict(set)
oov = set()
vocab = set()
for line in f:
try:
src, trg = line.split()
except ValueError:
continue
try:
src_ind = tokenizer1.token_to_id(src)
trg_ind = tokenizer2.token_to_id(trg)
if src_ind is None or trg_ind is None:
raise KeyError
if src_ind >= sim.shape[0] or trg_ind >= sim.shape[1]:
raise KeyError
validation[src_ind].add(trg_ind)
vocab.add(src)
except KeyError:
oov.add(src)
oov -= vocab # If one of the translation options is in the vocabulary, then the entry is not an oov
validation_coverage = len(validation) / (len(validation) + len(oov))
# https://github.com/artetxem/vecmap/blob/master/map_embeddings.py#L383
src = list(validation.keys())
# xw[src].dot(zw.T, out=simval)
srct = [s for s in src if s < sim.shape[0]]
simval = sim[srct]
nn = np.nanargmax(simval, axis=1)
accuracy = np.mean([1 if nn[i] in validation[src[i]] else 0 for i in range(len(src))])
similarity = np.mean([max([simval[i, j].tolist() for j in validation[src[i]]]) for i in range(len(src))])
return {'accuracy': accuracy, 'similarity': similarity, 'coverage': validation_coverage}
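# Hedged sketch of the protocol: with sim = np.array([[0.9, 0.1], [0.2, 0.8]])
# and a reference dictionary mapping source id 0 -> {0} and 1 -> {1}, the
# row-wise argmax picks the correct target for both rows, so accuracy = 1.0
# and coverage = 1.0 (no OOV entries).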
def get_refdict(refpath):
with open(refpath, encoding='utf-8', errors='surrogateescape') as f:
val = collections.defaultdict(set)
for line in f:
try:
src, trg = line.split()
except ValueError:
continue
val[src].add(trg)
return val
def report_sim(sim: np.ndarray, tokenizer1: Tokenizer, tokenizer2: Tokenizer, refpath: Optional[str]):
# ind_src = np.arange(sim.shape[0])
kth = range(3)
ind_tgt = np.argpartition(-sim, kth, axis=1)
res = []
maxes = []
stats = {}
if refpath is not None:
refdict = get_refdict(refpath)
# keys: accuracy, coverage, similarity
vecmapres = vecmap_evaluate(sim, tokenizer1, tokenizer2, refpath)
stats = vecmapres
else:
refdict = collections.defaultdict(set)
for i in range(sim.shape[0]):
char = tokenizer1.id_to_token(i)
pred = tokenizer2.id_to_token(ind_tgt[i][0])
preds = ' '.join(tokenizer2.id_to_token(j) for j in ind_tgt[i][kth])
gap = sim[i][ind_tgt[i][0]] - sim[i][ind_tgt[i][1]]
maxes.append(sim[i][ind_tgt[i][0]])
res.append({
'char': char,
# 'id': i,
'pred': pred,
'preds': preds,
'eq': char == pred,
# 'gap': gap,
# 'max': maxes[i],
'correct': pred in refdict[char],
'refs': ' '.join(refdict[char])
})
# print(res)
df = pd.DataFrame.from_records(res)
nidentical = len(df[df['char'] == df['pred']])
ncorrect = len(df[df['correct'] == True])
stats['nidentical'] = nidentical
stats['mean_max'] = np.mean(maxes)
stats['ncorrect'] = ncorrect
# print(stats)
return df, stats
def _dict_to_inds(refpath, tok1, tok2, full=False):
refdict = get_refdict(refpath)
for src, trgs in refdict.items():
src_ind = tok1.token_to_id(src)
if src_ind is None:
continue
trg_inds = [tok2.token_to_id(trg) for trg in trgs]
trg_inds = [trg_ind for trg_ind in trg_inds if trg_ind is not None]
if full:
for trg_ind in trg_inds:
yield src_ind, trg_ind
elif len(trg_inds) > 0:
trg_ind = trg_inds[0]
yield src_ind, trg_ind
def dict_to_inds(refpath, tok1, tok2, full=False):
return list(zip(*_dict_to_inds(refpath, tok1, tok2, full=full)))
def label_preds(preds, refpath: Optional[str]):
# ind_src = np.arange(sim.shape[0])
if refpath is not None:
refdict = get_refdict(refpath)
print('size of dictionary', len(refdict.keys()))
res = []
ws = []  # track source words to detect duplicate predictions
for w, v in preds:
res.append(
{
'src': w,
'trg': v,
'correct': v in refdict[w],
'wrong': w in refdict and v not in refdict[w],
'identical': w == v,
'refs': ' '.join(refdict[w]),
}
)
ws.append(w)
if len(ws) != len(set(ws)):
print('WARNING: duplicate words exist in the predictions')
# print(res)
df = pd.DataFrame.from_records(res)
def boolcount(prop):
return len(df[df[prop]==True])
nidentical = boolcount('identical')
ncorrect = boolcount('correct')
nwrong = boolcount('wrong')
accuracy = ncorrect / (ncorrect + nwrong)
coverage = (ncorrect + nwrong) / len(refdict)
noov = len(refdict) - (ncorrect + nwrong)
stats = {'nidentical': nidentical, 'ncorrect': ncorrect, 'noov': noov, 'accuracy': accuracy, 'coverage': coverage}
return df, stats
|
coocmap-main
|
evaluation.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
# Copyright (C) 2016-2018 Mikel Artetxe <artetxem@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import numpy as np
def get_array_module(x):
return np
def read(file, threshold=0, vocabulary=None, dtype='float'):
header = file.readline().split(' ')
count = int(header[0]) if threshold <= 0 else min(threshold, int(header[0]))
dim = int(header[1])
words = []
matrix = np.empty((count, dim), dtype=dtype) if vocabulary is None else []
for i in range(count):
word, vec = file.readline().split(' ', 1)
if word.strip() == '':
word2 = str(word.encode("utf-8"))
print(f'Warning: only space chars in word ({word2})', file=sys.stderr)
if vocabulary is None:
words.append(word)
matrix[i] = np.fromstring(vec, sep=' ', dtype=dtype)
elif word in vocabulary:
words.append(word)
matrix.append(np.fromstring(vec, sep=' ', dtype=dtype))
return (words, matrix) if vocabulary is None else (words, np.array(matrix, dtype=dtype))
def write(words, matrix, file):
m = matrix
print('%d %d' % m.shape, file=file)
for i in range(len(words)):
print(words[i] + ' ' + ' '.join(['%.6g' % x for x in m[i]]), file=file)
def length_normalize(matrix):
xp = get_array_module(matrix)
norms = xp.sqrt(xp.sum(matrix**2, axis=1))
norms[norms == 0] = 1
matrix /= norms[:, xp.newaxis]
def mean_center(matrix):
xp = get_array_module(matrix)
avg = xp.mean(matrix, axis=0)
matrix -= avg
def normalize(matrix, actions):
for action in actions:
if action == 'unit':
length_normalize(matrix)
elif action == 'center':
mean_center(matrix)
#################### End of original code ##################################
#################### Start of new code ##################################
else:
all = globals()
from inspect import isfunction
if action in all and isfunction(all[action]):
all[action](matrix)
else:
raise Exception('Unknown action: ' + action)
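# A minimal sketch of how normalize dispatches (illustrative, not from the
# original file): 'unit' and 'center' are handled explicitly above, and any
# other action name is resolved to a module-level function defined below,
# e.g. 'log1p', 'pmi', or 'levy2014'. All actions modify the matrix in place.
def _demo_normalize():
    X = np.random.rand(4, 3) + 1e-3
    normalize(X, ['log1p', 'unit', 'center', 'unit'])  # applied left to right
    return X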
def sqrt(matrix):
xp = get_array_module(matrix)
matrix[:] = xp.sqrt(matrix)
def median_center(matrix):
xp = get_array_module(matrix)
# m = xp.median(matrix, axis=0)
m = np.percentile(matrix, q=50, axis=0)
matrix -= m
def pmi(X):
    # note: despite the name this is not log-PMI; it rescales X in place by
    # its column sums and then its row sums (each plus eps)
    eps = 1e-8
rs = X.sum(axis=0, keepdims=True)
cs = X.sum(axis=1, keepdims=True)
X /= rs + eps
X /= cs + eps
def levy2014k(X, k=1):
eps = 1e-8
sum1 = np.sum(np.abs(X), axis=1, keepdims=True) + eps
sum0 = np.sum(np.abs(X), axis=0, keepdims=True) + eps
N = np.sum(X)
X[:] = np.maximum(0, np.log(X) + np.log(N) - np.log(sum1) - np.log(sum0) - np.log(k))
def levy2014_k5(X):
levy2014k(X, k=5)
def levy2014(X):
levy2014k(X, k=1)
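# For reference, levy2014k computes a shifted positive PMI (an assumption based
# on the naming, after Levy & Goldberg 2014): with N = sum(X), row sums r_i and
# column sums c_j, each entry becomes max(0, log(X_ij * N / (r_i * c_j)) - log k).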
def log(X):
X[:] = np.maximum(0, np.log(X))
def log1p(X):
X[:] = np.log(1 + X)
def glove(X):
# (8) of the glove paper: https://aclanthology.org/D14-1162.pdf
Y = np.log(1+X)
for _ in range(5):
bi = np.mean(Y, axis=1, keepdims=True)
Y -= bi
bj = np.mean(Y, axis=0, keepdims=True)
Y -= bj
print('bi ', np.mean(np.abs(bi)))
if np.mean(np.abs(bi)) > 1e-6:
print('bi failed', np.mean(np.abs(bi)))
if np.mean(np.abs(bj)) > 1e-6:
print('bj failed', np.mean(np.abs(bj)))
X[:] = Y
def unitL1(X):
norm1 = np.sum(np.abs(X), axis=1, keepdims=True)
norm1[norm1 == 0] = 1
X /= norm1
def fung1997(X):
from scipy.special import xlogy
sum1 = np.sum(np.abs(X), axis=1, keepdims=True)
sum0 = np.sum(np.abs(X), axis=0, keepdims=True)
N = np.sum(X)
X[:] = xlogy(X / N, X * N / (sum1 * sum0))
def length_normalize_axis0(matrix):
xp = get_array_module(matrix)
norms = xp.sqrt(xp.sum(matrix**2, axis=0))
norms[norms == 0] = 1
matrix /= norms
def mean_center_axis1(matrix):
xp = get_array_module(matrix)
avg = xp.mean(matrix, axis=1)
matrix -= avg[:, xp.newaxis]
# import faiss
# def faiss_knn(Q, X, k, dist='IP'):
# d = X.shape[1]
# if dist == 'IP':
# index = faiss.IndexFlatIP(d)
# elif dist == 'L2':
# index = faiss.IndexFlatL2(d)
# index.add(X)
# dists, inds = index.search(Q, k)
# return dists, inds
# def faiss_csls(Q, X, k, dist='IP', csls=10):
# # this k is neighborhood
# sim_bwd, _ = faiss_knn(X, Q, k=csls)
# knn_sim_bwd = sim_bwd.mean(axis=1)
# topvals, topinds = faiss_knn(Q, X, k=2*csls)
# for i in range(topvals.shape[0]):
# topvals[i] = 2 * topvals[i] - knn_sim_bwd[topinds[i]]
# ind = (-topvals).argsort(axis=1)
# topvals = np.take_along_axis(topvals, ind, axis=1)
# topinds = np.take_along_axis(topinds, ind, axis=1)
# return topvals, topinds
# def noise(X):
# xp = get_array_module(X)
# noise = np.random.randn(1, X.shape[1])
# noise /= xp.sqrt(xp.sum(noise**2))
# # size = np.random.randint(1, 3)
# size = 1
# randinds = np.random.randint(X.shape[1], size=size)
# X -= np.mean(X[randinds, :], axis=0)
# normalize(X, ['unit', 'center', 'unit'])
# def joint_noise(X, Y):
# xp = get_array_module(X)
# noise = np.random.randn(1, X.shape[1])
# noise /= xp.sqrt(xp.sum(noise**2))
# randinds = np.random.randint(X.shape[1], size=1)
# randcenter = np.mean(X[randinds, :], axis=0)
# X -= randcenter
# Y -= randcenter
# normalize(X, ['unit', 'center', 'unit'])
# normalize(Y, ['unit', 'center', 'unit'])
|
coocmap-main
|
embeddings.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
import os
from dataclasses import dataclass
import wandb
import shutil
import pandas as pd
import numpy as np
import data
import match
import evaluation
import embeddings
# experimental parameters
defaults = dict(
lan1='./europarl-v7.hu-en.en',
lan2='./europarl-v7.hu-en.hu',
eval='en-hu',
size1=20,
width=5,
symmetric=1,
vectorize='trunc', # fasttext sim_svd trunc word2vec
dim=300,
tokentype='WordLevel', # tokenizer WordLevel, BPE
vocab_size=5000,
limit_alphabet=100,
min_frequency=5,
supervision='unsupervised',
label='none',
)
os.environ["OMP_NUM_THREADS"] = "4" # export OMP_NUM_THREADS=4
os.environ["OPENBLAS_NUM_THREADS"] = "4" # export OPENBLAS_NUM_THREADS=4
os.environ["MKL_NUM_THREADS"] = "6" # export MKL_NUM_THREADS=6
os.environ["VECLIB_MAXIMUM_THREADS"] = "4" # export VECLIB_MAXIMUM_THREADS=4
os.environ["NUMEXPR_NUM_THREADS"] = "6" # export NUMEXPR_NUM_THREADS=6
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["WANDB_MODE"] = "offline" # switch to "online" to use wandb cloud sync
run = wandb.init(config=defaults, project='data efficiency')
base1 = os.path.join(wandb.run.dir, 'lan1')
base2 = os.path.join(wandb.run.dir, 'lan2')
os.makedirs(base1)
os.makedirs(base2)
cfg = wandb.config
def make_sized(lan, sizemb, pout, skipmb=0):
corpus = data.get_data(lan)
text = corpus.headmb(lan, skipmb+sizemb)
with open(pout, 'wt', encoding='utf-8') as fout:
fout.write(text[int(skipmb*1e6):])
p1 = os.path.join(base1, 'c.txt')
p2 = os.path.join(base2, 'c.txt')
make_sized(cfg.lan1, cfg.size1, p1)
size2 = cfg.size1 if cfg.symmetric == 1 else cfg.size2
skip2 = cfg.size1 if cfg.lan1 == cfg.lan2 else 0
make_sized(cfg.lan2, size2, p2, skipmb=skip2)
d1 = data.Corpus(p1, base1,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
d2 = data.Corpus(p2, base2,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
def get_evaldict():
lan1s, lan2s = cfg.eval.split('-')
eval = data.MUSEEval()
dictpath = os.path.join(wandb.run.dir, 'eval_id.dict')
with open(dictpath, 'wt', encoding='utf-8', errors='surrogateescape') as f:
v1 = d1.tokenizer.get_vocab()
v2 = d2.tokenizer.get_vocab()
intersection = set(v1.keys()).intersection(v2.keys())
print('vocab has overlap of length', len(intersection))
for w in intersection:
f.write(f'{w}\t{w}\n')
# dictid = dictpath
if lan1s != lan2s:
dictpath = os.path.join(wandb.run.dir, 'eval_dict.dict')
lanpath = eval.eval_path(f'{lan1s}-{lan2s}', type='full')
shutil.copyfile(lanpath, dictpath)
return dictpath
dictpath = get_evaldict()
@dataclass
class SearchArgs:
stochastic_interval = 5
stochastic_initial = 1
stochastic_multiplier = 2
threshold = 1e-4
maxiter = 100
eta = 1
method = 'orthogonal' # or orthogonal or lstsq
match = 'vecmap'
csls = True
args = SearchArgs()
dumpdir = os.path.join(wandb.run.dir, 'dump')
os.makedirs(dumpdir, exist_ok=True)
def evalf(sim):
# simf = match.most_diff_match(sim, k=10)
f, stats = evaluation.report_sim(sim, d1.tokenizer, d2.tokenizer, dictpath)
print(stats)
def dict_init_binary(tiebreak=1e-3):
inds = evaluation.dict_to_inds(dictpath, d1.tokenizer, d2.tokenizer, full=False)
sim = tiebreak * np.random.rand(d1.Co.shape[0], d2.Co.shape[0])
for i in range(len(inds[0])):
sim[inds[0][i], inds[1][i]] = 1
sim[0, 0] = 1
return sim
rows = []
def experiment(drop=20, dim=300, r1=99, r2=99):
def record(type, sim):
print(type)
simd = match.most_diff_match(sim, 10)
df, stats = evaluation.report_sim(simd, d1.tokenizer, d2.tokenizer, dictpath)
info = stats
info.update({'id': run.id, 'drop': drop, 'dim_p': dim, 'method_type': type})
for k, v in cfg.items():
            if k in info: print(f'Warning: {k} already exists')
info[k] = v
rows.append(info)
wandb.log({'table': wandb.Table(dataframe=pd.DataFrame.from_records(rows))})
wandb.log({'basicinfo': info})
print(info)
df.to_csv(os.path.join(dumpdir, f'{type}-{drop}-{dim}.csv'))
normproc = ['unit', 'center', 'unit']
normproc1 = ['unit']
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=None, evalf=evalf)
record('coocmap', simscoocmap)
def clip_drop():
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
match.clip(X, r1=r1, r2=r2)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=None, evalf=evalf)
record('coocmap-clip', simscoocmap)
dropinit = simscoocmap
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
X = match.svd_power(X, beta=1, drop=drop, dim=None)
embeddings.normalize(X, normproc)
match.clip(X, r1=r1, r2=r2)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=dropinit, evalf=evalf)
record('coocmap-drop', simscoocmap)
# clip_drop() # run clip and drop as well
experiment(drop=20, dim=300)
|
coocmap-main
|
test_coocmap.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
import itertools
import os
import sys
import subprocess
import time
# import lzma # needed for BUCC20Corpus
import numpy as np
from tokenizers import Token, Tokenizer
from tokenizers.models import BPE, WordLevel
from tokenizers.trainers import BpeTrainer, WordLevelTrainer
from tokenizers.pre_tokenizers import Metaspace, Whitespace, WhitespaceSplit
from tokenizers.normalizers import Lowercase
from fast import cooc_count
os.environ["TOKENIZERS_PARALLELISM"] = "true"
cachebase = os.path.expanduser('~/.cache/cooc/')
def full_path(lan):
if lan.startswith('~/') or lan.startswith('./') or lan.startswith('/'):
path = os.path.expanduser(lan)
print('trying path', path)
else:
# relative path from cachebase
path = os.path.expanduser(os.path.join(cachebase, lan))
print('trying cache path', path)
return path
def get_data(lan):
if lan in EuroParl.lans:
return EuroParl()
if lan in WikiDump.lans:
return WikiDump()
if lan in NewsCrawl.lans:
return NewsCrawl()
# else just get raw file from cache base
"""
wikidumps/zhwikishuf.jieba.txt: tokenized and to lower case
news-crawl/news.2018.en.shuffled.deduped: en-news for a change
"""
path = full_path(lan)
if os.path.isfile(path):
return HeadableData()
else:
raise Exception(f'No data for {lan} at {path}')
class HeadableData(object):
cachedir = os.path.expanduser(os.path.join(cachebase))
# works if you specify the path relative to the cachedir
def headmb(self, lan, sizemb):
size = int(sizemb * 1000000)
lantxt = full_path(lan)
        with open(lantxt, 'rt', encoding="utf-8") as f:
            sizedtxt = f.read(size)
        return sizedtxt
class NewsCrawl(HeadableData):
"""
Data from https://data.statmt.org/news-crawl/en/
https://data.statmt.org/news-crawl/es/
processing of this data was very simple, so just notes here
wget https://data.statmt.org/news-crawl/en/news.2018.en.shuffled.deduped.gz
wget https://data.statmt.org/news-crawl/es/news.2019.es.shuffled.deduped.gz
wget https://data.statmt.org/news-crawl/hu/news.2019.hu.shuffled.deduped.gz
wget https://data.statmt.org/news-crawl/hu/news.2020.hu.shuffled.deduped.gz
wget https://data.statmt.org/news-crawl/hu/news.2021.hu.shuffled.deduped.gz
cat news.*.hu.* > news.2019-2021.hu.shuffled.deduped
This one removes around half the data
cat news.2019-2021.hu.shuffled.deduped | grep -v http | grep -v trackingCode > news.2019-2021.hu.shuffled.deduped.filtered
gzip -d *
"""
cachedir = os.path.expanduser(os.path.join(cachebase, 'news-crawl/'))
lans = ['news.2018.en.shuffled.deduped', 'news.2019.es.shuffled.deduped', 'news.2019-2021.hu.shuffled.deduped', 'news.2019-2021.hu.shuffled.deduped.filtered', 'news.2018-2019-2020-2022.hu.shuffled']
def headmb(self, lan, sizemb):
assert lan in self.lans, 'lan must be one of: ' + ', '.join(self.lans)
size = int(sizemb * 1000000)
lantxt = os.path.join(self.cachedir, f'{lan}')
        with open(lantxt, 'rt', encoding="utf-8") as f:
            sizedtxt = f.read(size)
        return sizedtxt
class EuroParl(HeadableData):
cachedir = os.path.expanduser(os.path.join(cachebase, 'europarl/'))
urls = {
# 'fr-en': 'https://www.statmt.org/europarl/v7/fr-en.tgz',
# 'es-en': 'https://www.statmt.org/europarl/v7/es-en.tgz',
# 'de-en': 'https://www.statmt.org/europarl/v7/de-en.tgz',
'fi-en': 'https://www.statmt.org/europarl/v7/fi-en.tgz',
'hu-en': 'https://www.statmt.org/europarl/v7/hu-en.tgz',
}
lans_raw = [f'europarl-v7.{suf}' for suf in ['fr-en.fr', 'fr-en.en', 'es-en.es', 'es-en.en', 'de-en.de', 'hu-en.en', 'hu-en.hu', 'fi-en.fi', 'fi-en.en']]
lansshuf = [f'{pref}.shuf' for pref in lans_raw]
lans = lans_raw + lansshuf
def __init__(self):
cachedir = self.cachedir
if not os.path.isdir(cachedir):
print(f'Making dir {cachedir}', file=sys.stderr)
os.makedirs(cachedir, exist_ok=True)
def check_and_dl_all(self):
for lan in self.urls:
self.check_and_dl(lan)
for l in self.lans_raw:
if not os.path.isfile(os.path.join(self.cachedir, l)):
tgzname = l.split('.')[1] + '.tgz'
print(f'Extracting for {l}', file=sys.stderr)
proc = subprocess.run(f'tar xzf {tgzname}', shell=True, cwd=self.cachedir)
else:
print(f'Already extracted for {l}', file=sys.stderr)
for flan, fshuf in zip(self.lans_raw, self.lansshuf):
if not os.path.isfile(os.path.join(self.cachedir, fshuf)):
subprocess.run(f'cat {flan} | shuf > {fshuf}', shell=True, cwd=self.cachedir)
def check_and_dl(self, lan):
url = self.urls[lan]
fname = url.split('/')[-1]
outfile = os.path.join(self.cachedir, fname)
if not os.path.isfile(outfile):
print(f'Downloading {outfile}', file=sys.stderr)
proc = subprocess.run(f'wget -nv {url} -O {outfile}', shell=True, cwd=self.cachedir)
else:
print(f'Already downloaded {outfile}', file=sys.stderr)
def headmb(self, lan, sizemb):
assert lan in self.lans, 'lan must be one of: ' + ', '.join(self.lans)
size = int(sizemb * 1000000)
lantxt = os.path.join(self.cachedir, f'{lan}')
        with open(lantxt, 'rt', encoding="utf-8") as f:
            sizedtxt = f.read(size)
        return sizedtxt
# https://www.statmt.org/europarl/v7/es-en.tgz
# wc: 2007723 52653110 346919801 europarl-v7.fr-en.fr
# wc: 2007723 50330641 301523301 europarl-v7.fr-en.en
class WikiDump(HeadableData):
"""
"""
urls = {
'enwiki': [
'https://dumps.wikimedia.org/enwiki/20230401/enwiki-20230401-pages-meta-current1.xml-p1p41242.bz2',
'https://dumps.wikimedia.org/enwiki/20230401/enwiki-20230401-pages-meta-current2.xml-p41243p151573.bz2',
'https://dumps.wikimedia.org/enwiki/20230401/enwiki-20230401-pages-meta-current3.xml-p151574p311329.bz2'
],
'eswiki': [
'https://dumps.wikimedia.org/eswiki/20230401/eswiki-20230401-pages-meta-current1.xml-p1p159400.bz2',
'https://dumps.wikimedia.org/eswiki/20230401/eswiki-20230401-pages-meta-current2.xml-p159401p693323.bz2',
'https://dumps.wikimedia.org/eswiki/20230401/eswiki-20230401-pages-meta-current3.xml-p693324p1897740.bz2'
],
'zhwiki': [
'https://dumps.wikimedia.org/zhwiki/20230401/zhwiki-20230401-pages-meta-current1.xml-p1p187712.bz2',
'https://dumps.wikimedia.org/zhwiki/20230401/zhwiki-20230401-pages-meta-current2.xml-p187713p630160.bz2',
'https://dumps.wikimedia.org/zhwiki/20230401/zhwiki-20230401-pages-meta-current3.xml-p630161p1389648.bz2',
'https://dumps.wikimedia.org/zhwiki/20230401/zhwiki-20230401-pages-meta-current4.xml-p1389649p2889648.bz2',
'https://dumps.wikimedia.org/zhwiki/20230401/zhwiki-20230401-pages-meta-current4.xml-p2889649p3391029.bz2',
'https://dumps.wikimedia.org/zhwiki/20230401/zhwiki-20230401-pages-meta-current5.xml-p3391030p4891029.bz2'
],
'frwiki': [
'https://dumps.wikimedia.org/frwiki/20230401/frwiki-20230401-pages-meta-current1.xml-p1p306134.bz2',
'https://dumps.wikimedia.org/frwiki/20230401/frwiki-20230401-pages-meta-current2.xml-p306135p1050822.bz2',
'https://dumps.wikimedia.org/frwiki/20230401/frwiki-20230401-pages-meta-current3.xml-p1050823p2550822.bz2'
],
'dewiki': [
'https://dumps.wikimedia.org/dewiki/20230401/dewiki-20230401-pages-meta-current1.xml-p1p297012.bz2',
'https://dumps.wikimedia.org/dewiki/20230401/dewiki-20230401-pages-meta-current2.xml-p297013p1262093.bz2',
'https://dumps.wikimedia.org/dewiki/20230401/dewiki-20230401-pages-meta-current3.xml-p1262094p2762093.bz2'
]
}
lans = {'enwikishuf', 'eswikishuf', 'zhwikishuf', 'frwikishuf', 'dewikishuf'}
cachedir = os.path.expanduser(os.path.join(cachebase, 'wikidumps/'))
def __init__(self):
cachedir = self.cachedir
if not os.path.isdir(cachedir):
print(f'Making dir {cachedir}', file=sys.stderr)
os.makedirs(cachedir, exist_ok=True)
def check_and_dl_all(self):
for lan in self.urls:
self.check_and_dl(lan)
def check_and_dl(self, lan):
landir = os.path.join(self.cachedir, lan)
if not os.path.isdir(landir):
os.makedirs(landir, exist_ok=True)
urls = self.urls[lan]
for partn, url in enumerate(urls):
# get last part of the url
fname = url.split('/')[-1]
outfile = os.path.join(landir, fname)
if not os.path.isfile(outfile):
print(f'Downloading {outfile}', file=sys.stderr)
proc = subprocess.Popen(['wget', '-nv', url, '-O', outfile])
output, error = proc.communicate()
print(output, file=sys.stderr)
print(error, file=sys.stderr)
else:
print(f'Already downloaded {outfile}', file=sys.stderr)
outdir = os.path.join(landir, f'OUT_{fname}')
if not os.path.isdir(outdir):
proc = subprocess.Popen(f'python -m wikiextractor.WikiExtractor {outfile} -o {outdir} -b 100M --no-templates'.split())
output, error = proc.communicate()
print(output, file=sys.stderr)
print(error, file=sys.stderr)
# cat OUT_*/*/wiki* > ../zhwiki.txt
# cat enwiki.txt | grep -v '</doc>' | grep -v '<doc id=' | shuf > enwikishuf.txt
# lantxt = os.path.join(self.cachedir, f'{lan}.txt')
# if not os.path.isfile(lantxt):
# print(f'concatenating to {lantxt}')
# with open(lantxt, 'w') as f:
# proc = subprocess.Popen(f'cat {landir}/OUT_*/*/wiki*'.split(), stdout=f)
# output, error = proc.communicate()
# # print(output, file=sys.stderr)
# print(error, file=sys.stderr)
def headmb(self, lan, sizemb):
assert lan in self.lans
size = int(sizemb * 1000000)
lantxt = os.path.join(self.cachedir, f'{lan}.txt')
        with open(lantxt, 'rt', encoding="utf-8") as f:
            sizedtxt = f.read(size)
        return sizedtxt
class BUCC20Corpus(object):
dataurls = {
'en-wiki': 'http://corpus.leeds.ac.uk/serge/bucc/en.ol.xz',
'es-wiki': 'http://corpus.leeds.ac.uk/serge/bucc/es.ol.xz',
'zh-wiki': 'http://corpus.leeds.ac.uk/serge/bucc/zh.ol.xz',
'en-wac': 'http://corpus.leeds.ac.uk/serge/bucc/ukwac.ol.xz',
'de-wac': 'http://corpus.leeds.ac.uk/serge/bucc/dewac.ol.xz',
'fr-wac': 'http://corpus.leeds.ac.uk/serge/bucc/frwac.ol.xz',
'ru-wac': 'http://corpus.leeds.ac.uk/serge/bucc/ruwac.ol.xz',
}
cachedir = os.path.expanduser('~/.cache/bucc20/corpus/')
sizeddir = os.path.expanduser('~/.cache/bucc20/corpus/sized/')
def __init__(self):
sizeddir = BUCC20Corpus.sizeddir
cachedir = BUCC20Corpus.cachedir
if not os.path.isdir(cachedir):
print(f'Making dir {cachedir}', file=sys.stderr)
os.makedirs(cachedir, exist_ok=True)
if not os.path.isdir(sizeddir):
print(f'Making dir {sizeddir}', file=sys.stderr)
os.makedirs(sizeddir, exist_ok=True)
def check_and_dl_all(self):
for lan in BUCC20Corpus.dataurls:
self.check_and_dl(lan)
def check_and_dl(self, lan):
cachedir = BUCC20Corpus.cachedir
url = BUCC20Corpus.dataurls[lan]
outfile = os.path.join(cachedir, lan + '.xz')
# print(f'Making cache dir {self.cachedir}', file=sys.stderr)
if not os.path.isdir(cachedir):
print(f'Making cache dir {cachedir}', file=sys.stderr)
os.makedirs(cachedir, exist_ok=True)
if not os.path.isfile(outfile):
print(f'Downloading {outfile}', file=sys.stderr)
proc = subprocess.Popen(['wget', url, '-O', outfile])
output, error = proc.communicate()
print(output, file=sys.stderr)
print(error, file=sys.stderr)
    def headmb(self, lan, sizemb):
        import lzma  # deferred import, only needed for BUCC20Corpus (see note at top)
        size = int(sizemb * 1000000)
        xzfile = os.path.join(BUCC20Corpus.cachedir, lan + '.xz')
        if not os.path.isfile(xzfile):
            self.check_and_dl(lan)
        with lzma.open(xzfile, 'rt', encoding="utf-8") as f:
            sizedtxt = f.read(size)
        return sizedtxt
class EvalData(object):
def eval_path(self, lanpair):
pass
class MUSEEval(EvalData):
_base = 'https://dl.fbaipublicfiles.com/arrival/dictionaries/'
pairs = ['en-de', 'en-es', 'en-fr', 'en-ru', 'en-zh', 'en-fi', 'en-hu']
cachedir = os.path.expanduser(os.path.join(cachebase, 'muse_dicts/'))
types = {
'full': '.txt',
'train': '.0-5000.txt',
'test': '.5000-6500.txt'
}
def __init__(self, ):
if not os.path.isdir(self.cachedir):
print(f'Making cache dir {self.cachedir}', file=sys.stderr)
os.makedirs(self.cachedir, exist_ok=True)
for p in self.pairs:
self.download(p)
def download(self, p):
for t in self.types:
suff = self.types[t]
url = self._base + f'{p}{suff}'
outfile = os.path.join(self.cachedir, f'{p}{suff}')
if not os.path.isfile(outfile):
print(f'Downloading {url}', file=sys.stderr)
proc = subprocess.Popen(['wget', url, '-O', outfile])
output, error = proc.communicate()
def eval_path(self, lanpair, type='full'):
if lanpair not in self.pairs:
print(f'lanpair {lanpair} not in {self.pairs}, try downloading')
self.download(lanpair)
return os.path.join(self.cachedir, lanpair + self.types[type])
class BUCC20Eval(EvalData):
dataurls = {
'de-en': 'https://comparable.limsi.fr/bucc2020/tr/de-en-1-5000-training.txt',
'es-en': 'https://comparable.limsi.fr/bucc2020/tr/es-en-1-5000-training.txt',
'fr-en': 'https://comparable.limsi.fr/bucc2020/tr/fr-en-1-5000-training.txt',
'ru-en': 'https://comparable.limsi.fr/bucc2020/tr/ru-en-1-5000-training.txt',
'zh-en': 'https://comparable.limsi.fr/bucc2020/tr/zh-en-1-4500-training.txt',
'en-de': 'https://comparable.limsi.fr/bucc2020/tr/en-de-1-5000-training.txt',
'en-es': 'https://comparable.limsi.fr/bucc2020/tr/en-es-1-5000-training.txt',
'en-fr': 'https://comparable.limsi.fr/bucc2020/tr/en-fr-1-5000-training.txt',
'en-ru': 'https://comparable.limsi.fr/bucc2020/tr/en-ru-1-5000-training.txt',
'en-zh': 'https://comparable.limsi.fr/bucc2020/tr/en-zh-1-5000-training.txt',
}
cachedir = os.path.expanduser('~/.cache/bucc20/train/')
def __init__(self, ):
cachedir = BUCC20Eval.cachedir
if not os.path.isdir(cachedir):
print(f'Making cache dir {cachedir}', file=sys.stderr)
os.makedirs(cachedir, exist_ok=True)
for lanpair in BUCC20Eval.dataurls:
url = BUCC20Eval.dataurls[lanpair]
outfile = os.path.join(cachedir, lanpair + '.txt')
if not os.path.isfile(outfile):
print(f'Downloading {url}', file=sys.stderr)
proc = subprocess.Popen(['wget', url, '-O', outfile])
output, error = proc.communicate()
def eval_path(self, lanpair):
return os.path.join(BUCC20Eval.cachedir, lanpair + '.txt')
def train_bpe_tokenizer(train_data_paths, model_path, vocab_size, limit_alphabet=100, min_frequency=10):
trainer = BpeTrainer(special_tokens=["[UNK]"], min_frequency=min_frequency,
vocab_size=vocab_size, limit_alphabet=limit_alphabet)
tokenizer = Tokenizer(BPE(unk_token="[UNK]", fuse_unk=True))
tokenizer.normalizer = Lowercase()
tokenizer.pre_tokenizer = Metaspace()
print(f'data: {train_data_paths}', file=sys.stderr)
tokenizer.train(train_data_paths, trainer)
tokenizer.save(model_path)
return tokenizer
def train_word_tokenizer(train_data_paths, model_path, vocab_size, limit_alphabet=100, min_frequency=10):
trainer = WordLevelTrainer(special_tokens=["[UNK]"], min_frequency=min_frequency,
vocab_size=vocab_size, limit_alphabet=limit_alphabet)
tokenizer = Tokenizer(WordLevel(unk_token="[UNK]"))
# tokenizer.pre_tokenizer = Metaspace()
tokenizer.pre_tokenizer = Whitespace()
tokenizer.normalizer = Lowercase()
print(f'data: {train_data_paths}', file=sys.stderr)
tokenizer.train(train_data_paths, trainer)
tokenizer.save(model_path)
return tokenizer
def tokenize(data_path: str, tokenizer: Tokenizer):
t1 = time.perf_counter()
    with open(data_path, 'r') as f:
        alljoined = f.read()
    t2 = time.perf_counter()
    print(f'reading {data_path} took: {t2 - t1 :.3f}', file=sys.stderr)
    print(f'number of chars: {len(alljoined)}', file=sys.stderr)
    all = alljoined.split('\n')
    print(f'number of lines: {len(all)}', file=sys.stderr)
t1 = time.perf_counter()
tokened = tokenizer.encode_batch(all)
t2 = time.perf_counter()
print(f'encode_batch took: {t2 - t1 :.3f}', file=sys.stderr)
return tokened
def text_to_cooc(tokened, tokenizer: Tokenizer, width=6):
ids = [t.ids for t in tokened]
joined_ids = list(itertools.chain(*ids))
print('num words: ', len(joined_ids))
t1 = time.perf_counter()
cooc = cooc_count.cooc(np.array(joined_ids, dtype=np.int64), width=width, vocab_size=tokenizer.get_vocab_size())
t2 = time.perf_counter()
print(f'constructing cooc took: {t2 - t1 :.3f}', file=sys.stderr)
return cooc
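# cooc_count.cooc is a compiled helper; the following is a minimal pure-numpy
# sketch (an assumption, not the actual implementation) of a symmetric windowed
# co-occurrence count over a flat id sequence.
def _demo_cooc(ids, width, vocab_size):
    Co = np.zeros((vocab_size, vocab_size), dtype=np.float64)
    for pos, w in enumerate(ids):
        for off in range(1, width + 1):  # look ahead within the window
            if pos + off < len(ids):
                Co[w, ids[pos + off]] += 1
                Co[ids[pos + off], w] += 1  # count both directions
    return Co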
class Corpus(object):
def __init__(self, datapath, basepath,
tokentype, vocab_size, limit_alphabet, min_frequency,
vectorize: str, width: int, dim: int, adaptive=False, write_vecs=False):
self.adaptive = adaptive
self.write_vecs = write_vecs
self.dim = int(dim)
self.width = int(width)
self.vectorize = vectorize
self.datapath = datapath
os.makedirs(basepath, exist_ok=True)
self.model_path = os.path.join(basepath, 'model.json')
if tokentype == 'WordLevel':
self.tokenizer = train_word_tokenizer([self.datapath], self.model_path, vocab_size, limit_alphabet, min_frequency)
elif tokentype == 'BPE':
self.tokenizer = train_bpe_tokenizer([self.datapath], self.model_path, vocab_size, limit_alphabet, min_frequency)
else:
raise Exception(f'{tokentype} not recognized')
self.tokened = tokenize(self.datapath, self.tokenizer)
self.tokened_out = os.path.join(basepath, 'c.tok')
self.ids_out = os.path.join(basepath, 'c.ids')
t1 = time.perf_counter()
if vectorize == 'fasttext' or vectorize == 'word2vec':
with open(self.tokened_out, 'w') as f:
# basic tokenizer does not output UNK for unknown words, leading to all words being used
f.writelines([' '.join([self.tokenizer.id_to_token(id) for id in li.ids]) for li in self.tokened])
# with open(self.ids_out, 'w') as f:
# f.writelines([' '.join([str(id) for id in li.ids]) for li in self.tokened])
t2 = time.perf_counter()
print(f'writing tokened took: {t2 - t1 :.3f}', file=sys.stderr)
self.vecpath = os.path.join(basepath, 'c.vec')
self.vecs = {}
self.Co = text_to_cooc(self.tokened, self.tokenizer, width=self.width)
if vectorize == 'fasttext':
self._fasttext_vecs()
elif vectorize == 'word2vec':
self._word2vec_vecs()
elif vectorize == 'sim_svd':
self._sim_vecs()
elif vectorize == 'trunc':
self._count_vecs()
else:
raise Exception('vectorize type not recognized')
def _write_vectors(self, m):
self.vec = m
with open(self.vecpath, 'w') as f:
vs = self.tokenizer.get_vocab_size()
print('%d %d' % m.shape, file=f)
for i in range(vs):
print(self.tokenizer.id_to_token(i) + ' ' + ' '.join(['%.6g' % x for x in m[i]]), file=f)
def _sim_vecs(self, alpha=0.5, beta=1):
maxdim = min(self.Co.shape[1], 10000)
Cot = self.Co[:, :maxdim]
u, s, _ = np.linalg.svd(np.power(Cot, alpha), full_matrices=False)
u = u[:, :self.dim]*np.power(s[:self.dim], beta)
        self.vecs['sim_svd'] = u
        if self.write_vecs:
            self._write_vectors(u)
def _fasttext_vecs(self, epoch=5):
import fasttext
# https://fasttext.cc/docs/en/options.html
# two common corrections for fasttext
if self.adaptive:
lradapt = 0.1 / np.power(self.dim / 50, 0.5)
mbsize = os.stat(self.tokened_out).st_size / 1e6
epoch = 5 if mbsize > 300 else int(5 * np.sqrt(300 / mbsize))
config = dict(model='skipgram', lr=lradapt, dim=self.dim, ws=self.width, epoch=epoch)
else:
config = dict(model='skipgram', lr=0.05, dim=self.dim, ws=self.width, epoch=5)
print(config)
# config = dict(model='skipgram', lr=0.05, dim=self.dim, ws=self.width, epoch=epoch, minn=0, maxn=0)
model = fasttext.train_unsupervised(self.tokened_out, thread=8, **config)
mat = np.zeros((self.tokenizer.get_vocab_size(), model.dim), dtype=float)
for w in self.tokenizer.get_vocab():
v = model.get_word_vector(w)
i = self.tokenizer.token_to_id(w)
mat[i] = v
self.vecs['fasttext'] = mat
if self.write_vecs:
self._write_vectors(mat)
def _word2vec_vecs(self, epoch=5):
import fasttext
# https://fasttext.cc/docs/en/options.html
# just fasttext without subwords
config = dict(model='skipgram', lr=0.05, dim=self.dim, ws=self.width, epoch=epoch, minn=0, maxn=0)
model = fasttext.train_unsupervised(self.tokened_out, thread=8, **config)
mat = np.zeros((self.tokenizer.get_vocab_size(), model.dim), dtype=float)
for w in self.tokenizer.get_vocab():
v = model.get_word_vector(w)
i = self.tokenizer.token_to_id(w)
mat[i] = v
self.vecs['word2vec'] = mat
def _count_vecs(self, alpha=0.5):
mat = np.power(np.array(self.Co[:, :self.dim], dtype=float), alpha)
self.vecs['trunc'] = mat
if self.write_vecs:
self._write_vectors(mat)
# eval = BUCC20Eval()
# bucc20.get_sized('zh-wiki', 2)
# bucc20.get_sized('zh-wiki', 4)
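# A minimal usage sketch for Corpus (the paths are hypothetical): builds a
# tokenizer on the corpus file, tokenizes it, and fills self.Co with windowed
# co-occurrence counts plus the vectors selected by `vectorize`.
def _demo_corpus():
    d = Corpus('./lan1/c.txt', './lan1',
               tokentype='WordLevel', vocab_size=5000, limit_alphabet=100, min_frequency=5,
               vectorize='trunc', width=5, dim=300)
    print(d.Co.shape)  # expected: (vocab_size, vocab_size) co-occurrence counts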
|
coocmap-main
|
data.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
from collections import Counter
import numpy as np
import embeddings
np.set_printoptions(formatter={'float': lambda x: "{0:0.3f}".format(x)})
MAX_SVD_DIM = 5000 # maximum SVD to avoid long compute time
### initialization methods ###
def vecmap_unsup(x, z, norm_proc=['unit', 'center', 'unit']):
    print('maxdim', MAX_SVD_DIM)
    u, s, vt = np.linalg.svd(x, full_matrices=False)
    xsim = (u*s).dot(u.T)
    u, s, vt = np.linalg.svd(z, full_matrices=False)
    zsim = (u*s).dot(u.T)
    del u, s, vt
    xsim.sort(axis=1)
    zsim.sort(axis=1)
embeddings.normalize(xsim, norm_proc)
embeddings.normalize(zsim, norm_proc)
sim = xsim.dot(zsim.T)
return sim
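# vecmap_unsup appears to follow VecMap's fully unsupervised initialization:
# within each language, compute every word's similarity to every other word,
# sort each row so the profiles are comparable across languages without any
# shared axes, then compare the sorted profiles directly.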
def match_sim(xsim, zsim, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit']):
sim_size = min(xsim.shape[1], zsim.shape[1])
xsim = np.array(xsim[:, :sim_size])
zsim = np.array(zsim[:, :sim_size])
if sort:
xsim.sort(axis=1)
zsim.sort(axis=1)
embeddings.normalize(xsim, norm_proc)
embeddings.normalize(zsim, norm_proc)
sim = xsim @ zsim.T
return sim
### main search loops ###
def vecmap(x: np.ndarray, z: np.ndarray, args, sim_init=None, evalf=None):
print('running vecmap', x.shape)
keep_prob = args.stochastic_initial
best_objective = float('-inf')
last_improvement = 0
end = False
inds1, inds2 = 0, 0
for it in range(args.maxiter):
if it - last_improvement > args.stochastic_interval:
# maxswaps = max(1, maxswaps - 1)
if keep_prob == 1:
end = True
keep_prob = min(1.0, args.stochastic_multiplier * keep_prob)
last_improvement = it
if it == 0:
if sim_init is not None:
sim = sim_init
else:
sim = vecmap_unsup(x, z, norm_proc=['unit', 'center', 'unit'])
else:
# rotation
if args.method == 'orthogonal':
u, s, vt = np.linalg.svd(x[inds1].T @ z[inds2])
w = u @ vt
elif args.method == 'lstsq':
                w, _, _, _ = np.linalg.lstsq(x[inds1], z[inds2], rcond=1e-5)
sim = x @ w @ z.T
#
if args.csls:
sim = most_diff_match(sim, 10)
inds1, inds2, evalsim = match(sim, args.match)
if evalf is not None:
evalf(evalsim)
objf = np.mean(sim.max(axis=1))
objb = np.mean(sim.max(axis=0))
objective = (objf + objb) / 2
print(f'{it} {keep_prob} \t{objf:.4f}\t{objective:.4f}\t{best_objective:.4f}')
if objective >= best_objective + args.threshold:
last_improvement = it
if it != 0:
best_objective = objective
if end:
break
return inds1, inds2, sim
def coocmapt(Cp1: np.ndarray, Cp2: np.ndarray, args, normproc=['unit'], sim_init=None, evalf=None):
"""
basic coocmap using just numpy but only works for cosine distance
"""
best_objective = float('-inf')
last_improvement = 0
end = False
inds1, inds2 = 0, 0
simd = 0
for it in range(args.maxiter):
if it - last_improvement > args.stochastic_interval:
end = True
if it == 0:
if sim_init is not None:
sim = sim_init
else:
sim = match_sim(Cp1, Cp2, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
sim_init = sim
# sim = vecmap_unsup(Cp1, Cp2)
if args.csls:
sim = most_diff_match(sim, 10)
inds1, inds2, evalsim = match(sim, args.match)
if evalf is not None:
evalf(evalsim)
if end:
break
uniqf2 = uniqb1 = len(inds1)
Cp1f = Cp1[:, inds1]
Cp2f = Cp2[:, inds2]
embeddings.normalize(Cp1f, normproc)
embeddings.normalize(Cp2f, normproc)
        # recompute similarities restricted to the matched columns
sim = Cp1f @ Cp2f.T
# X = torch.from_numpy(Cp1f)
# Y = torch.from_numpy(Cp2f)
# sim = -torch.cdist(X, Y, p=2).numpy()
objf = np.mean(np.max(sim, axis=1))
objb = np.mean(np.max(sim, axis=0))
objective = 0.5 * (objf + objb)
if objective > best_objective:
last_improvement = it
            if it > 0: # the initial round uses a different matrix and should not be compared
best_objective = objective
print(f'objective {it} \t{objf:.5f} \t{objective:.5f} \t {best_objective:.5f} \t {uniqf2} \t {uniqb1}')
return inds1, inds2, sim
def coocmapl1(Cp1: np.ndarray, Cp2: np.ndarray, args, normproc=['unit'], sim_init=None, evalf=None):
"""
duplicated code using cdistance from torch, mainly to test l1 distance
"""
best_objective = float('-inf')
last_improvement = 0
end = False
inds1, inds2 = 0, 0
simd = 0
for it in range(args.maxiter):
if it - last_improvement > args.stochastic_interval:
end = True
if it == 0:
if sim_init is not None:
sim = sim_init
else:
sim = match_sim(Cp1, Cp2, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
sim_init = sim
# sim = vecmap_unsup(Cp1, Cp2)
if args.csls:
sim = most_diff_match(sim, 10)
inds1, inds2, evalsim = match(sim, args.match)
if evalf is not None:
evalf(evalsim)
if end:
break
uniqf2 = uniqb1 = len(inds1)
Cp1f = Cp1[:, inds1]
Cp2f = Cp2[:, inds2]
embeddings.normalize(Cp1f, normproc)
embeddings.normalize(Cp2f, normproc)
        # recompute distances restricted to the matched columns
# sim = Cp1f @ Cp2f.T
import torch
if torch.cuda.is_available():
X = torch.from_numpy(Cp1f).cuda()
Y = torch.from_numpy(Cp2f).cuda()
sim = -torch.cdist(X, Y, p=1).cpu().numpy()
else:
X = torch.from_numpy(Cp1f)
Y = torch.from_numpy(Cp2f)
sim = -torch.cdist(X, Y, p=1).numpy()
# this is only approximately a greedy method, as this objective is not guaranteed to increase
objf = np.mean(np.max(sim, axis=1))
objb = np.mean(np.max(sim, axis=0))
objective = 0.5 * (objf + objb)
if objective > best_objective:
last_improvement = it
            if it > 0: # the initial round uses a different matrix and should not be compared
best_objective = objective
print(f'objective {it} \t{objf:.5f} \t{objective:.5f} \t {best_objective:.5f} \t {uniqf2} \t {uniqb1}')
return inds1, inds2, sim
def svd_power(X, beta=1, drop=None, dim=None, symmetric=False):
u, s, vt = np.linalg.svd(X, full_matrices=False)
print('np.power(s)', np.power(s, 1).sum())
    if dim is not None:
        # s = np.sqrt(np.maximum(0, s**2 - s[dim]**2))
        # s = np.maximum(0, s - s[dim])
        s[dim:] = 0
        print('np.power(s_dim)', np.power(s, 1).sum())
    s = np.power(s, beta)  # apply the spectrum power regardless of dim
if drop is not None:
if isinstance(drop, np.ndarray):
s[list(drop)] = 0
elif isinstance(drop, int):
s[:drop] = 0
print('np.power(s_drop)', np.power(s, 1).sum())
if symmetric:
res = (u * s) @ u.T
else:
res = (u * s) @ vt
norm = np.linalg.norm(res - X, ord='fro')
normX = np.linalg.norm(X, ord='fro')
print(f'diff {norm:.2e} / {normX:.2e}')
return res
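# A minimal sketch of svd_power semantics (illustrative, not from the original
# file): drop=k zeroes the k largest singular values, dim=d keeps only the
# first d, and beta rescales the spectrum.
def _demo_svd_power():
    X = np.random.rand(6, 4)
    Xd = svd_power(X, beta=1, drop=1, dim=None)  # remove the dominant component
    Xk = svd_power(X, beta=1, drop=None, dim=2)  # rank-2 approximation of X
    return Xd, Xk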
def sim_vecs(Co, dim, alpha=0.5, beta=1):
maxdim = min(Co.shape[1], 10000)
Co = Co[:, :maxdim]
u, s, _ = np.linalg.svd(np.power(Co, alpha), full_matrices=False)
u = u[:, :dim]*np.power(s[:dim], beta)
return u
### matching methods ###
def greedy_match(sim0, iters=10):
sim = sim0.copy()
    for _ in range(iters):
# if sim is n by m, am1 is size m, am0 is size n
am1 = np.nanargmax(sim, axis=0)
am0 = np.nanargmax(sim, axis=1)
bi0 = am0[am1] == np.arange(sim.shape[1])
bi1 = am1[am0] == np.arange(sim.shape[0])
assert bi0.sum() == bi1.sum()
bimatches = bi0.sum()
uniques = len(np.unique(am0)), len(np.unique(am1))
hubs = np.mean([c for _, c in Counter(am0).most_common(3)])
value = np.take_along_axis(sim0, am1[:, None], axis=1).mean()
stats = {'bimatches': bimatches, 'uniques': uniques, 'hubs': hubs, 'value': value}
print(stats)
if bimatches > 0.98 * min(*sim.shape):
break
for i in range(sim.shape[0]):
if bi1[i]:
sim[i] = float('nan')
sim[:, am0[i]] = float('nan')
sim[i, am0[i]] = float('inf')
return np.arange(sim.shape[1])[bi0], am0[bi0], sim
def most_diff_match(sim0, k):
sim = sim0.copy()
top0 = -np.partition(-sim, kth=k, axis=0)
top1 = -np.partition(-sim, kth=k, axis=1)
mean0 = top0[:k, :].mean(axis=0, keepdims=True)
mean1 = top1[:, :k].mean(axis=1, keepdims=True)
return sim - 0.5*(mean0 + mean1)
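# most_diff_match is a CSLS-style neighborhood correction (cf. the `csls` flag
# in the search loops): each similarity is discounted by half the mean of its
# row top-k and column top-k values, penalizing hub rows/columns that are close
# to everything. A minimal usage sketch on random data:
def _demo_most_diff_match():
    sim = np.random.rand(8, 8)
    return most_diff_match(sim, k=3)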
def forward_backward_match(sim):
indsf2 = np.argmax(sim, axis=1)
indsb1 = np.argmax(sim, axis=0)
indsb2 = np.arange(sim.shape[1])
indsf1 = np.arange(sim.shape[0])
inds1 = np.concatenate((indsf1, indsb1))
inds2 = np.concatenate((indsf2, indsb2))
hubsf = Counter(indsf2).most_common(3)
hubsb = Counter(indsb1).most_common(3)
print('hubs', hubsf, hubsb)
return inds1, inds2, sim
def match(sim, method):
    if method == 'vecmap':
        return forward_backward_match(sim)
    elif method == 'coocmap':
        return greedy_match(sim, iters=10)
    else:
        raise Exception('Unknown match method: ' + str(method))
### clipping ###
def clipthres(A, p1, p2):
R1 = np.percentile(A, p1, axis=1, keepdims=True)
r = np.percentile(R1, p2)
print('percent greater \t', np.sum((A > r) * 1) / A.size)
return r
def clipBoth(A, r1, r2):
ub = clipthres(A, r1, r2)
lb = clipthres(A, 100-r1, 100-r2)
print('clipped', lb, ub)
return lb, ub
def clip(A, r1=99, r2=99):
lb, ub = clipBoth(A, r1, r2)
A[A < lb] = lb
A[A > ub] = ub
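# A minimal usage sketch for clip (thresholds are the defaults used in the
# experiment scripts): winsorize the extreme values of a matrix in place, with
# the bounds chosen by the percentile-of-percentiles rule in clipthres above.
def _demo_clip():
    A = np.random.randn(100, 100)
    clip(A, r1=99, r2=99)  # caps values below lb and above ub
    return A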
|
coocmap-main
|
match.py
|
import os
import subprocess
from dataclasses import dataclass
import lzma
import wandb
import argparse
import shutil
import pandas as pd
import numpy as np
import data
import match
import evaluation
import embeddings
# from baselines import VecMap
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["OMP_NUM_THREADS"] = "4" # export OMP_NUM_THREADS=4
os.environ["OPENBLAS_NUM_THREADS"] = "4" # export OPENBLAS_NUM_THREADS=4
os.environ["MKL_NUM_THREADS"] = "6" # export MKL_NUM_THREADS=6
os.environ["VECLIB_MAXIMUM_THREADS"] = "4" # export VECLIB_MAXIMUM_THREADS=4
os.environ["NUMEXPR_NUM_THREADS"] = "6" # export NUMEXPR_NUM_THREADS=6
# os.environ["WANDB_MODE"] = "offline"
defaults = dict(
# data
lan1='enwikishuf',
lan2='dewikishuf',
eval='en-de',
size1=300,
# size2=20, skip2=10,
symmetric=1,
width=5,
# vectorization fasttext sim_svd count
vectorize='trunc',
dim=300,
# tokenizer WordLevel, BPE
tokentype='WordLevel',
vocab_size=500,
limit_alphabet=100,
min_frequency=5,
# experiment
supervision='unsupervised',
label='glove',
)
run = wandb.init(config=defaults, project='data efficiency')
base1 = os.path.join(wandb.run.dir, 'lan1')
base2 = os.path.join(wandb.run.dir, 'lan2')
os.makedirs(base1)
os.makedirs(base2)
cfg = wandb.config
def make_sized(lan, sizemb, pout, skipmb=0):
corpus = data.get_data(lan)
text = corpus.headmb(lan, skipmb+sizemb)
with open(pout, 'wt', encoding='utf-8') as fout:
fout.write(text[int(skipmb*1e6):])
p1 = os.path.join(base1, 'c.txt')
p2 = os.path.join(base2, 'c.txt')
make_sized(cfg.lan1, cfg.size1, p1)
size2 = cfg.size1 if cfg.symmetric == 1 else cfg.size2
skip2 = cfg.size1 if cfg.lan1 == cfg.lan2 else 0
make_sized(cfg.lan2, size2, p2, skipmb=skip2)
d1 = data.Corpus(p1, base1,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
d2 = data.Corpus(p2, base2,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
def get_evaldict():
lan1s, lan2s = cfg.eval.split('-')
eval = data.MUSEEval()
dictpath = os.path.join(wandb.run.dir, 'eval_id.dict')
with open(dictpath, 'wt', encoding='utf-8', errors='surrogateescape') as f:
v1 = d1.tokenizer.get_vocab()
v2 = d2.tokenizer.get_vocab()
intersection = set(v1.keys()).intersection(v2.keys())
print('vocab has overlap of length', len(intersection))
for w in intersection:
f.write(f'{w}\t{w}\n')
# dictid = dictpath
if lan1s != lan2s:
dictpath = os.path.join(wandb.run.dir, 'eval_dict.dict')
lanpath = eval.eval_path(f'{lan1s}-{lan2s}', type='full')
shutil.copyfile(lanpath, dictpath)
return dictpath
dictpath = get_evaldict()
@dataclass
class SearchArgs:
stochastic_interval = 10
stochastic_add = 1e-1
stochastic_multiplier = 2
threshold = 1e-4
stochastic_initial = 1
maxswaps = 100
maxiter = 100
eta = 1
#
method = 'orthogonal' # or orthogonal or lstsq
match = 'vecmap'
csls = True
args = SearchArgs()
dumpdir = os.path.join(wandb.run.dir, 'dump')
os.makedirs(dumpdir, exist_ok=True)
def evalf(sim):
# simf = match.most_diff_match(sim, k=3)
f, stats = evaluation.report_sim(sim, d1.tokenizer, d2.tokenizer, dictpath)
print(stats)
def dict_init_binary(tiebreak=1e-3):
inds = evaluation.dict_to_inds(dictpath, d1.tokenizer, d2.tokenizer, full=False)
sim = tiebreak * np.random.rand(d1.Co.shape[0], d2.Co.shape[0])
for i in range(len(inds[0])):
sim[inds[0][i], inds[1][i]] = 1
sim[0, 0] = 1
return sim
rows = []
def experiment(drop=20, dim=300):
print('original dim', d1.Co.shape)
def record(type, sim):
print(type)
# plt.figure()
# plt.imshow(sims[type])
simd = match.most_diff_match(sim, 10)
# inds1, inds2, sim_greed = match.greedy_match(simd, iters=5)
df, stats = evaluation.report_sim(simd, d1.tokenizer, d2.tokenizer, dictpath)
info = stats
info.update({'id': run.id, 'drop': drop, 'dim_p': dim, 'method_type': type})
for k, v in cfg.items():
            if k in info: print(f'Warning: {k} already exists')
info[k] = v
rows.append(info)
wandb.log({'table': wandb.Table(dataframe=pd.DataFrame.from_records(rows))})
wandb.log({'basicinfo': info})
print(info)
df.to_csv(os.path.join(dumpdir, f'{type}-{drop}-{dim}.csv'))
normproc1 = ['unit']
normproc = ['unit', 'center', 'unit']
def standard_normalize(normproc, name):
A1 = np.array(1.0*d1.Co)
A2 = np.array(1.0*d2.Co)
embeddings.normalize(A1, normproc)
embeddings.normalize(A2, normproc)
_, _, simscoocmap = match.coocmapt(A1, A2, args, normproc=normproc1, sim_init=None, evalf=evalf)
record(name, simscoocmap)
# standard_normalize(['log'] + normproc, 'log')
standard_normalize(['log1p'] + normproc, 'log1p')
def levy2014():
standard_normalize(['levy2014', 'unit'], 'levy2014-l2')
standard_normalize(['levy2014'] + normproc, 'levy2014-normalize')
standard_normalize(['levy2014_k5', 'unit'], 'levy2014_k5-l2')
standard_normalize(['levy2014_k5'] + normproc, 'levy2014_k5-normalize')
A1 = np.array(1.0*d1.Co)
A2 = np.array(1.0*d2.Co)
embeddings.normalize(A1, ['levy2014', 'unitL1'])
embeddings.normalize(A2, ['levy2014', 'unitL1'])
_, _, simscoocmap = match.coocmapl1(A1, A2, args, normproc=['unitL1'], sim_init=None, evalf=evalf)
record('levy2014-l1', simscoocmap)
A1 = np.array(1.0*d1.Co)
A2 = np.array(1.0*d2.Co)
embeddings.normalize(A1, ['levy2014_k5', 'unitL1'])
embeddings.normalize(A2, ['levy2014_k5', 'unitL1'])
_, _, simscoocmap = match.coocmapl1(A1, A2, args, normproc=['unitL1'], sim_init=None, evalf=evalf)
record('levy2014_k5-l1', simscoocmap)
def glove():
standard_normalize(['glove', 'unit'], 'glove-l2')
standard_normalize(['glove'] + normproc, 'glove-normalize')
A1 = np.array(1.0*d1.Co)
A2 = np.array(1.0*d2.Co)
embeddings.normalize(A1, ['glove', 'unitL1'])
embeddings.normalize(A2, ['glove', 'unitL1'])
_, _, simscoocmap = match.coocmapl1(A1, A2, args, normproc=['unitL1'], sim_init=None, evalf=evalf)
record('glove-l1', simscoocmap)
glove()
# A1 = np.sqrt(d1.Co)
# A2 = np.sqrt(d2.Co)
# embeddings.normalize(A1, normproc)
# embeddings.normalize(A2, normproc)
# _, _, simscoocmap = match.coocmapt(A1, A2, args, normproc=normproc1, sim_init=None, evalf=evalf)
# record('coocmap', simscoocmap)
# A1c = np.array(A1)
# A2c = np.array(A2)
# match.clip(A1c, r1=99, r2=99)
# match.clip(A2c, r1=99, r2=99)
# _, _, simscoocmap = match.coocmapt(A1c, A2c, args, normproc=normproc1, sim_init=None, evalf=evalf)
# record('coocmap-clip', simscoocmap)
# A1f = match.svd_power(A1, beta=1, drop=drop, dim=None)
# A2f = match.svd_power(A2, beta=1, drop=drop, dim=None)
# normproc = ['unit', 'center', 'unit']
# embeddings.normalize(A1f, normproc)
# embeddings.normalize(A2f, normproc)
# match.clip(A1f, r1=99, r2=99)
# match.clip(A2f, r1=99, r2=99)
# # dropinit = match.most_diff_match(simscoocmap, 10)
# dropinit = simscoocmap
# _, _, sim = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=dropinit, evalf=evalf)
# record('coocmap-drop', sim)
# clipinit = sim
def rapp1995(name, init):
alpha = 1.0
A1f = np.power(d1.Co, alpha)
A2f = np.power(d2.Co, alpha)
norm = ['pmi', 'unitL1']
embeddings.normalize(A1f, norm)
embeddings.normalize(A2f, norm)
_, _, simscoocmap = match.coocmapl1(A1f, A2f, args, normproc=['unitL1'], sim_init=init, evalf=evalf)
record(name, simscoocmap)
# rapp1995('rapp1995', None)
# rapp1995('rapp1995-init', clipinit)
def fung1997(name, init):
alpha = 1.0
A1f = np.power(d1.Co, alpha)
A2f = np.power(d2.Co, alpha)
norm = ['fung1997', 'unitL1']
embeddings.normalize(A1f, norm)
embeddings.normalize(A2f, norm)
_, _, simscoocmap = match.coocmapl1(A1f, A2f, args, normproc=['unitL1'], sim_init=init, evalf=evalf)
record(name, simscoocmap)
# fung1997('fung1997-l1', None)
# fung1997('fung1997-l1-init', clipinit)
# normproc1 = ['unit']
# dictinit = dict_init_binary()
# _, _, simdictinit = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=dictinit, evalf=evalf)
# record('dict-init-drop', simdictinit)
# generate a simple grid enumeration
from itertools import product
drops = [20]
dims = [100]
grid_plan = product(drops, dims)
for drop, dim in grid_plan:
if drop >= dim: continue
experiment(drop, dim)
# method = VecMap(d1.vecpath, d2.vecpath, dictpath, wandb.run.dir, cfg)
# method.run()
# res = method.eval(dictpath)
# # write the predictions
# wandb.log({'accuracy': res['accuracy'], 'coverage': res['coverage']})
|
coocmap-main
|
experiments/test_accvsize_cooc.py
|
import os
import subprocess
from dataclasses import dataclass
import lzma
import wandb
import argparse
import shutil
import pandas as pd
import numpy as np
import data
import match
import evaluation
import embeddings
# from baselines import VecMap
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["OMP_NUM_THREADS"] = "4" # export OMP_NUM_THREADS=4
os.environ["OPENBLAS_NUM_THREADS"] = "4" # export OPENBLAS_NUM_THREADS=4
os.environ["MKL_NUM_THREADS"] = "6" # export MKL_NUM_THREADS=6
os.environ["VECLIB_MAXIMUM_THREADS"] = "4" # export VECLIB_MAXIMUM_THREADS=4
os.environ["NUMEXPR_NUM_THREADS"] = "6" # export NUMEXPR_NUM_THREADS=6
os.environ["WANDB_MODE"] = "offline" # turn on to use wandb to sync
defaults = dict(
lan1='enwikishuf',
lan2='eswikishuf',
eval='en-es',
size1=30,
symmetric=1,
width=5,
vectorize='sim_svd', # vectorization fasttext sim_svd trunc word2vec
dim=300,
tokentype='WordLevel', # tokenizer WordLevel, BPE
vocab_size=5000,
limit_alphabet=100,
min_frequency=5,
supervision='unsupervised',
label='none',
)
run = wandb.init(config=defaults, project='data efficiency')
base1 = os.path.join(wandb.run.dir, 'lan1')
base2 = os.path.join(wandb.run.dir, 'lan2')
os.makedirs(base1)
os.makedirs(base2)
cfg = wandb.config
def make_sized(lan, sizemb, pout, skipmb=0):
corpus = data.get_data(lan)
text = corpus.headmb(lan, skipmb+sizemb)
with open(pout, 'wt', encoding='utf-8') as fout:
fout.write(text[int(skipmb*1e6):])
p1 = os.path.join(base1, 'c.txt')
p2 = os.path.join(base2, 'c.txt')
make_sized(cfg.lan1, cfg.size1, p1)
size2 = cfg.size1 if cfg.symmetric == 1 else cfg.size2
skip2 = cfg.size1 if cfg.lan1 == cfg.lan2 else 0
make_sized(cfg.lan2, size2, p2, skipmb=skip2)
d1 = data.Corpus(p1, base1,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
d2 = data.Corpus(p2, base2,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
def get_evaldict():
lan1s, lan2s = cfg.eval.split('-')
eval = data.MUSEEval()
dictpath = os.path.join(wandb.run.dir, 'eval_id.dict')
with open(dictpath, 'wt', encoding='utf-8', errors='surrogateescape') as f:
v1 = d1.tokenizer.get_vocab()
v2 = d2.tokenizer.get_vocab()
intersection = set(v1.keys()).intersection(v2.keys())
print('vocab has overlap of length', len(intersection))
for w in intersection:
f.write(f'{w}\t{w}\n')
# dictid = dictpath
if lan1s != lan2s:
dictpath = os.path.join(wandb.run.dir, 'eval_dict.dict')
lanpath = eval.eval_path(f'{lan1s}-{lan2s}', type='full')
shutil.copyfile(lanpath, dictpath)
return dictpath
dictpath = get_evaldict()
@dataclass
class SearchArgs:
stochastic_interval = 10
stochastic_initial = 1
stochastic_multiplier = 2
threshold = 1e-4
maxiter = 100
eta = 1
method = 'orthogonal' # or orthogonal or lstsq
match = 'vecmap'
csls = True
args = SearchArgs()
dumpdir = os.path.join(wandb.run.dir, 'dump')
os.makedirs(dumpdir, exist_ok=True)
def evalf(sim):
# simf = match.most_diff_match(sim, k=3)
f, stats = evaluation.report_sim(sim, d1.tokenizer, d2.tokenizer, dictpath)
print(stats)
def dict_init_binary(tiebreak=1e-3):
inds = evaluation.dict_to_inds(dictpath, d1.tokenizer, d2.tokenizer, full=False)
sim = tiebreak * np.random.rand(d1.Co.shape[0], d2.Co.shape[0])
for i in range(len(inds[0])):
sim[inds[0][i], inds[1][i]] = 1
sim[0, 0] = 1
return sim
rows = []
def experiment(drop=20, dim=300, r1=99, r2=99):
print('original dim', d1.Co.shape)
def record(type, sim):
print(type)
simd = match.most_diff_match(sim, 10)
df, stats = evaluation.report_sim(simd, d1.tokenizer, d2.tokenizer, dictpath)
info = stats
info.update({'id': run.id, 'drop': drop, 'dim_p': dim, 'method_type': type})
for k, v in cfg.items():
            if k in info: print(f'Warning: {k} already exists')
info[k] = v
rows.append(info)
wandb.log({'table': wandb.Table(dataframe=pd.DataFrame.from_records(rows))})
wandb.log({'basicinfo': info})
print(info)
df.to_csv(os.path.join(dumpdir, f'{type}-{drop}-{dim}.csv'))
d1ft = d1.vecs[cfg.vectorize]
d2ft = d2.vecs[cfg.vectorize]
normproc = ['unit', 'center', 'unit']
normproc1 = ['unit']
embeddings.normalize(d1ft, normproc)
embeddings.normalize(d2ft, normproc)
_, _, sim = match.vecmap(d1ft, d2ft, args, evalf=evalf)
record(f'vecmap-{cfg.vectorize}', sim)
def f(Co):
A1 = np.sqrt(Co)
X = match.sim_vecs(A1, dim, alpha=1)
embeddings.normalize(X, normproc)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, sim = match.vecmap(X, Z, args, evalf=evalf)
record('vecmap-raw', sim)
###### coocmap ######
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=None, evalf=evalf)
record('coocmap', simscoocmap)
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
match.clip(X, r1=r1, r2=r2)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=None, evalf=evalf)
record('coocmap-clip', simscoocmap)
dropinit = simscoocmap
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
X = match.svd_power(X, beta=1, drop=drop, dim=None)
embeddings.normalize(X, normproc)
match.clip(X, r1=r1, r2=r2)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=dropinit, evalf=evalf)
record('coocmap-drop', simscoocmap)
def dict_init():
dictinit = dict_init_binary()
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simdictinit = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=dictinit, evalf=evalf)
record('dict-init', simdictinit)
experiment(drop=20, dim=300)
# use the official vecmap implementation instead, need to turn on data.Corpus.write_vecs to write out vectors
# method = VecMap(d1.vecpath, d2.vecpath, dictpath, wandb.run.dir, cfg)
# method.run()
# res = method.eval(dictpath)
# # write the predictions
# wandb.log({'accuracy': res['accuracy'], 'coverage': res['coverage']})
|
coocmap-main
|
experiments/test_accvsize.py
|
import os
import subprocess
from dataclasses import dataclass
import lzma
import wandb
import argparse
import shutil
import pandas as pd
import numpy as np
import data
import match
import evaluation
import embeddings
# from baselines import VecMap
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["OMP_NUM_THREADS"] = "4" # export OMP_NUM_THREADS=4
os.environ["OPENBLAS_NUM_THREADS"] = "4" # export OPENBLAS_NUM_THREADS=4
os.environ["MKL_NUM_THREADS"] = "6" # export MKL_NUM_THREADS=6
os.environ["VECLIB_MAXIMUM_THREADS"] = "4" # export VECLIB_MAXIMUM_THREADS=4
os.environ["NUMEXPR_NUM_THREADS"] = "6" # export NUMEXPR_NUM_THREADS=6
defaults = dict(
# data
lan1='enwikishuf',
lan2='eswikishuf',
eval='en-es',
size1=10,
# size2=20, skip2=10,
symmetric=1,
width=5,
# vectorization fasttext sim_svd count
vectorize='trunc',
dim=300,
# tokenizer WordLevel, BPE
tokentype='WordLevel',
vocab_size=100,
limit_alphabet=100,
min_frequency=1,
# experiment
supervision='basic-init',
label='none',
)
run = wandb.init(config=defaults, project='data efficiency')
base1 = os.path.join(wandb.run.dir, 'lan1')
base2 = os.path.join(wandb.run.dir, 'lan2')
os.makedirs(base1)
os.makedirs(base2)
cfg = wandb.config
def make_sized(lan, sizemb, pout, skipmb=0):
corpus = data.get_data(lan)
text = corpus.headmb(lan, skipmb+sizemb)
with open(pout, 'wt', encoding='utf-8') as fout:
fout.write(text[int(skipmb*1e6):])
p1 = os.path.join(base1, 'c.txt')
p2 = os.path.join(base2, 'c.txt')
make_sized(cfg.lan1, cfg.size1, p1)
size2 = cfg.size1 if cfg.symmetric == 1 else cfg.size2
skip2 = cfg.size1 if cfg.lan1 == cfg.lan2 else 0
make_sized(cfg.lan2, size2, p2, skipmb=skip2)
##### Corpus construction below often fails for very small corpora or strict vocab settings
d1 = data.Corpus(p1, base1,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
d2 = data.Corpus(p2, base2,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
def get_evaldict():
lan1s, lan2s = cfg.eval.split('-')
eval = data.MUSEEval()
dictpath = os.path.join(wandb.run.dir, 'eval_id.dict')
with open(dictpath, 'wt', encoding='utf-8', errors='surrogateescape') as f:
v1 = d1.tokenizer.get_vocab()
v2 = d2.tokenizer.get_vocab()
intersection = set(v1.keys()).intersection(v2.keys())
for w in intersection:
f.write(f'{w}\t{w}\n')
# dictid = dictpath
if lan1s != lan2s:
dictpath = os.path.join(wandb.run.dir, 'eval_dict.dict')
lanpath = eval.eval_path(f'{lan1s}-{lan2s}', type='full')
shutil.copyfile(lanpath, dictpath)
return dictpath
dictpath = get_evaldict()
@dataclass
class SearchArgs:
stochastic_interval = 20
stochastic_add = 1e-1
stochastic_multiplier = 2
threshold = 1e-4
stochastic_initial = 1
maxswaps = 100
maxiter = 100
eta = 1
#
method = 'orthogonal' # or orthogonal or lstsq
match = 'vecmap'
csls = True
args = SearchArgs()
dumpdir = os.path.join(wandb.run.dir, 'dump')
os.makedirs(dumpdir, exist_ok=True)
def evalf(sim):
# simf = match.most_diff_match(sim, k=3)
f, stats = evaluation.report_sim(sim, d1.tokenizer, d2.tokenizer, dictpath)
print(stats)
def dict_init_binary():
inds = evaluation.dict_to_inds(dictpath, d1.tokenizer, d2.tokenizer, full=False)
sim = np.zeros((d1.Co.shape[0], d2.Co.shape[0]))
for i in range(len(inds[0])):
sim[inds[0][i], inds[1][i]] = 1
return sim
rows = []
def experiment(drop=20, dim=300, r1=1, r2=1):
print('original dim', d1.Co.shape)
def record(type, sim):
print(type)
# plt.figure()
# plt.imshow(sims[type])
simd = match.most_diff_match(sim, 10)
df, stats = evaluation.report_sim(simd, d1.tokenizer, d2.tokenizer, dictpath)
info = stats
info.update({'id': run.id, 'drop': drop, 'dim_p': dim, 'method_type': type})
for k, v in cfg.items():
            if k in info: print(f'Warning: {k} already exists')
info[k] = v
rows.append(info)
wandb.log({'table': wandb.Table(dataframe=pd.DataFrame.from_records(rows))})
wandb.log({'basicinfo': info})
print(info)
df.to_csv(os.path.join(dumpdir, f'{type}-{drop}-{dim}.csv'))
normproc1 = ['unit']
normproc = ['unit', 'center', 'unit']
A1 = np.sqrt(d1.Co)
A2 = np.sqrt(d2.Co)
embeddings.normalize(A1, normproc)
embeddings.normalize(A2, normproc)
if cfg.supervision == 'common-init':
coocinit = match.match_sim(A1, A2, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
elif cfg.supervision == 'clip-init':
A1c = np.array(A1)
A2c = np.array(A2)
match.clip(A1c, r1=99, r2=99)
match.clip(A2c, r1=99, r2=99)
coocinit = match.match_sim(A1c, A2c, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
else:
coocinit = None
# d1ft = d1.vecs[cfg.vectorize]
# d2ft = d2.vecs[cfg.vectorize]
# embeddings.normalize(d1ft, normproc)
# embeddings.normalize(d2ft, normproc)
# _, _, sim = match.vecmap(d1ft, d2ft, args, evalf=evalf, sim_init=coocinit)
# record(f'vecmap-{cfg.vectorize}', sim)
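    # NOTE: coocmapvec() below reads d1ft/d2ft from the block commented out
    # above; uncomment those lines before re-enabling the coocmapvec() call.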
def coocmapvec():
def sqrt_sim(x):
u, s, vt = np.linalg.svd(x, full_matrices=False)
return (u*s).dot(u.T)
A1f = sqrt_sim(d1ft)
A2f = sqrt_sim(d2ft)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
_, _, simscoocmap = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record(f'coocmap-{cfg.vectorize}', simscoocmap)
A1c = np.array(A1f)
A2c = np.array(A2f)
match.clip(A1c, r1=99, r2=99)
match.clip(A2c, r1=99, r2=99)
_, _, simscoocmap = match.coocmapt(A1c, A2c, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record(f'coocmap-{cfg.vectorize}-clip', simscoocmap)
initdrop = simscoocmap
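        # svd_power (semantics assumed): keep the top `dim` singular directions
        # while dropping the first `drop` of them; beta=1 leaves the singular
        # values unscaled.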
A1d = match.svd_power(A1f, beta=1, drop=drop, dim=dim)
A2d = match.svd_power(A2f, beta=1, drop=drop, dim=dim)
embeddings.normalize(A1d, normproc)
embeddings.normalize(A2d, normproc)
match.clip(A1d, r1=99, r2=99)
match.clip(A2d, r1=99, r2=99)
_, _, simscoocmap = match.coocmapt(A1d, A2d, args, normproc=normproc1, sim_init=initdrop, evalf=evalf)
record(f'coocmap-{cfg.vectorize}-drop', simscoocmap)
# coocmapvec()
# what if I get the correspondence analysis vectors here??
# sim0 = match.match_sim(A1, A2, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
A1f = match.svd_power(A1, beta=1, drop=None, dim=dim)
A2f = match.svd_power(A2, beta=1, drop=None, dim=dim)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
# _, _, simscoocmap = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
# record('coocmap', simscoocmap)
A1c = np.array(A1f)
A2c = np.array(A2f)
match.clip(A1c, r1=100-r1, r2=100-r2)
match.clip(A2c, r1=100-r1, r2=100-r2)
_, _, simscoocmap = match.coocmapt(A1c, A2c, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record(f'coocmap-clip-{r1:.1f}-{r2:.1f}', simscoocmap)
dropinit = simscoocmap
A1f = match.svd_power(A1, beta=1, drop=drop, dim=dim)
A2f = match.svd_power(A2, beta=1, drop=drop, dim=dim)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
match.clip(A1f, r1=100-r1, r2=100-r2)
match.clip(A2f, r1=100-r1, r2=100-r2)
# dropinit = match.most_diff_match(simscoocmap, 10)
_, _, sim = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=dropinit, evalf=evalf)
record(f'coocmap-drop-{r1:.1f}-{r2:.1f}', sim)
A1f = match.svd_power(A1c, beta=1, drop=drop, dim=dim)
A2f = match.svd_power(A2c, beta=1, drop=drop, dim=dim)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
match.clip(A1f, r1=100-r1, r2=100-r2)
match.clip(A2f, r1=100-r1, r2=100-r2)
_, _, sim = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=dropinit, evalf=evalf)
record(f'coocmap-clip-drop-{r1:.1f}-{r2:.1f}', sim)
# generate a simple grid enumeration
r1 = [0.5, 1, 2, 5]
r2 = [0.5, 1, 2, 5]
from itertools import product
grid_plan = list(product(r1, r2))
print(grid_plan)
np.random.shuffle(grid_plan)
for r1, r2 in grid_plan:
    drop = np.ceil(min(20, int(cfg.dim) * 20 / 400))  # scale drop with dim (dim=400 -> drop=20), capped at 20
    experiment(int(drop), int(cfg.dim), r1, r2)
# method = VecMap(d1.vecpath, d2.vecpath, dictpath, wandb.run.dir, cfg)
# method.run()
# res = method.eval(dictpath)
# # write the predictions
# wandb.log({'accuracy': res['accuracy'], 'coverage': res['coverage']})
|
coocmap-main
|
experiments/test_dropclip.py
|
import os
# Thread caps must be set before numpy/BLAS are imported to take effect.
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["OMP_NUM_THREADS"] = "4"
os.environ["OPENBLAS_NUM_THREADS"] = "4"
os.environ["MKL_NUM_THREADS"] = "6"
os.environ["VECLIB_MAXIMUM_THREADS"] = "4"
os.environ["NUMEXPR_NUM_THREADS"] = "6"
from dataclasses import dataclass
import shutil
import wandb
import pandas as pd
import numpy as np
import data
import match
import evaluation
import embeddings
# from baselines import VecMap
defaults = dict(
# data
lan1='enwikishuf',
lan2='eswikishuf',
eval='en-es',
size1=10,
# size2=20, skip2=10,
symmetric=1,
width=3,
    # vectorization: fasttext, sim_svd, count, trunc
    vectorize='trunc',
    dim=300,
    # tokenizer: WordLevel or BPE
    tokentype='WordLevel',
vocab_size=100,
limit_alphabet=100,
min_frequency=1,
# experiment
supervision='basic-init',
label='clipanddropmore',
)
run = wandb.init(config=defaults, project='data efficiency')
base1 = os.path.join(wandb.run.dir, 'lan1')
base2 = os.path.join(wandb.run.dir, 'lan2')
os.makedirs(base1)
os.makedirs(base2)
cfg = wandb.config
def make_sized(lan, sizemb, pout, skipmb=0):
corpus = data.get_data(lan)
text = corpus.headmb(lan, skipmb+sizemb)
with open(pout, 'wt', encoding='utf-8') as fout:
fout.write(text[int(skipmb*1e6):])
p1 = os.path.join(base1, 'c.txt')
p2 = os.path.join(base2, 'c.txt')
make_sized(cfg.lan1, cfg.size1, p1)
size2 = cfg.size1 if cfg.symmetric == 1 else cfg.size2
skip2 = cfg.size1 if cfg.lan1 == cfg.lan2 else 0
make_sized(cfg.lan2, size2, p2, skipmb=skip2)
##### building the Corpus (tokenizer training + vectorization) often fails here
d1 = data.Corpus(p1, base1,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
d2 = data.Corpus(p2, base2,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
def get_evaldict():
lan1s, lan2s = cfg.eval.split('-')
    muse = data.MUSEEval()
dictpath = os.path.join(wandb.run.dir, 'eval_id.dict')
with open(dictpath, 'wt', encoding='utf-8', errors='surrogateescape') as f:
v1 = d1.tokenizer.get_vocab()
v2 = d2.tokenizer.get_vocab()
intersection = set(v1.keys()).intersection(v2.keys())
for w in intersection:
f.write(f'{w}\t{w}\n')
# dictid = dictpath
if lan1s != lan2s:
dictpath = os.path.join(wandb.run.dir, 'eval_dict.dict')
        lanpath = muse.eval_path(f'{lan1s}-{lan2s}', type='full')
shutil.copyfile(lanpath, dictpath)
return dictpath
dictpath = get_evaldict()
@dataclass
class SearchArgs:
    stochastic_interval: int = 10
    stochastic_add: float = 1e-1
    stochastic_multiplier: float = 2
    threshold: float = 1e-4
    stochastic_initial: float = 1
    maxswaps: int = 100
    maxiter: int = 100
    eta: float = 1
    #
    method: str = 'orthogonal'  # 'orthogonal' or 'lstsq'
    match: str = 'vecmap'
    csls: bool = True
args = SearchArgs()
dumpdir = os.path.join(wandb.run.dir, 'dump')
os.makedirs(dumpdir, exist_ok=True)
def evalf(sim):
# simf = match.most_diff_match(sim, k=3)
f, stats = evaluation.report_sim(sim, d1.tokenizer, d2.tokenizer, dictpath)
print(stats)
def dict_init_binary():
inds = evaluation.dict_to_inds(dictpath, d1.tokenizer, d2.tokenizer, full=False)
sim = np.zeros((d1.Co.shape[0], d2.Co.shape[0]))
for i in range(len(inds[0])):
sim[inds[0][i], inds[1][i]] = 1
return sim
rows = []
def experiment(drop=20, dim=300):
print('original dim', d1.Co.shape)
def record(type, sim):
print(type)
# plt.figure()
# plt.imshow(sims[type])
simd = match.most_diff_match(sim, 10)
df, stats = evaluation.report_sim(simd, d1.tokenizer, d2.tokenizer, dictpath)
info = stats
info.update({'id': run.id, 'drop': drop, 'dim_p': dim, 'method_type': type})
for k, v in cfg.items():
            if k in info: print(f'Warning: {k} already exists')
info[k] = v
rows.append(info)
wandb.log({'table': wandb.Table(dataframe=pd.DataFrame.from_records(rows))})
wandb.log({'basicinfo': info})
print(info)
df.to_csv(os.path.join(dumpdir, f'{type}-{drop}-{dim}.csv'))
normproc1 = ['unit']
normproc = ['unit', 'center', 'unit']
A1 = np.sqrt(d1.Co)
A2 = np.sqrt(d2.Co)
embeddings.normalize(A1, normproc)
embeddings.normalize(A2, normproc)
if cfg.supervision == 'common-init':
coocinit = match.match_sim(A1, A2, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
elif cfg.supervision == 'clip-init':
A1c = np.array(A1)
A2c = np.array(A2)
match.clip(A1c, r1=99, r2=99)
match.clip(A2c, r1=99, r2=99)
coocinit = match.match_sim(A1c, A2c, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
else:
coocinit = None
d1ft = d1.vecs[cfg.vectorize]
d2ft = d2.vecs[cfg.vectorize]
embeddings.normalize(d1ft, normproc)
embeddings.normalize(d2ft, normproc)
_, _, sim = match.vecmap(d1ft, d2ft, args, evalf=evalf, sim_init=coocinit)
record(f'vecmap-{cfg.vectorize}', sim)
def coocmapvec():
def sqrt_sim(x):
u, s, vt = np.linalg.svd(x, full_matrices=False)
return (u*s).dot(u.T)
A1f = sqrt_sim(d1ft)
A2f = sqrt_sim(d2ft)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
_, _, simscoocmap = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record(f'coocmap-{cfg.vectorize}', simscoocmap)
A1c = np.array(A1f)
A2c = np.array(A2f)
match.clip(A1c, r1=99, r2=99)
match.clip(A2c, r1=99, r2=99)
_, _, simscoocmap = match.coocmapt(A1c, A2c, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record(f'coocmap-{cfg.vectorize}-clip', simscoocmap)
initdrop = simscoocmap
A1d = match.svd_power(A1f, beta=1, drop=drop, dim=dim)
A2d = match.svd_power(A2f, beta=1, drop=drop, dim=dim)
embeddings.normalize(A1d, normproc)
embeddings.normalize(A2d, normproc)
match.clip(A1d, r1=99, r2=99)
match.clip(A2d, r1=99, r2=99)
_, _, simscoocmap = match.coocmapt(A1d, A2d, args, normproc=normproc1, sim_init=initdrop, evalf=evalf)
record(f'coocmap-{cfg.vectorize}-drop', simscoocmap)
coocmapvec()
# what if I get the correspondence analysis vectors here??
# sim0 = match.match_sim(A1, A2, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
A1f = match.svd_power(A1, beta=1, drop=None, dim=dim)
A2f = match.svd_power(A2, beta=1, drop=None, dim=dim)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
_, _, simscoocmap = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record('coocmap', simscoocmap)
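    # Clip at the 98.5th percentile (a 1.5% tail, hence the -1.5 label below),
    # assuming match.clip caps entries above the given r1/r2 percentiles.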
A1c = np.array(A1f)
A2c = np.array(A2f)
match.clip(A1c, r1=98.5, r2=98.5)
match.clip(A2c, r1=98.5, r2=98.5)
_, _, simscoocmap = match.coocmapt(A1c, A2c, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record('coocmap-clip-1.5', simscoocmap)
dropinit_more = simscoocmap
A1c = np.array(A1f)
A2c = np.array(A2f)
match.clip(A1c, r1=99, r2=99)
match.clip(A2c, r1=99, r2=99)
_, _, simscoocmap = match.coocmapt(A1c, A2c, args, normproc=normproc1, sim_init=coocinit, evalf=evalf)
record('coocmap-clip', simscoocmap)
dropinit = simscoocmap
A1f = match.svd_power(A1, beta=1, drop=drop, dim=dim)
A2f = match.svd_power(A2, beta=1, drop=drop, dim=dim)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
match.clip(A1f, r1=99, r2=99)
match.clip(A2f, r1=99, r2=99)
# dropinit = match.most_diff_match(simscoocmap, 10)
_, _, sim = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=dropinit, evalf=evalf)
record('coocmap-drop', sim)
A1f = match.svd_power(A1, beta=1, drop=drop, dim=dim)
A2f = match.svd_power(A2, beta=1, drop=drop, dim=dim)
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
match.clip(A1f, r1=98.5, r2=98.5)
match.clip(A2f, r1=98.5, r2=98.5)
_, _, sim = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=dropinit_more, evalf=evalf)
record('coocmap-drop-1.5', sim)
# normproc1 = ['unit']
# dictinit = dict_init_binary()
# _, _, simdictinit = match.coocmapt(A1, A2, args, normproc=normproc1, sim_init=dictinit, evalf=evalf)
# record('dict-init', simdictinit)
# generate a simple grid enumeration
drop = np.ceil(min(20, int(cfg.dim) * 20 / 400))  # scale drop with dim (dim=400 -> drop=20), capped at 20
experiment(int(drop), int(cfg.dim))
# method = VecMap(d1.vecpath, d2.vecpath, dictpath, wandb.run.dir, cfg)
# method.run()
# res = method.eval(dictpath)
# # write the predictions
# wandb.log({'accuracy': res['accuracy'], 'coverage': res['coverage']})
|
coocmap-main
|
experiments/test_accvdim.py
|
import os
# Thread caps must be set before numpy/BLAS are imported to take effect.
os.environ["OMP_NUM_THREADS"] = "4"
os.environ["OPENBLAS_NUM_THREADS"] = "4"
os.environ["MKL_NUM_THREADS"] = "6"
os.environ["VECLIB_MAXIMUM_THREADS"] = "4"
os.environ["NUMEXPR_NUM_THREADS"] = "6"
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["WANDB_MODE"] = "offline"  # switch to "online" to use wandb cloud sync
from dataclasses import dataclass
import wandb
import shutil
import pandas as pd
import numpy as np
import data
import match
import evaluation
import embeddings
# experimental parameters
defaults = dict(
lan1='./europarl-v7.hu-en.en',
lan2='./europarl-v7.hu-en.hu',
eval='en-hu',
size1=20,
width=5,
symmetric=1,
    vectorize='trunc',  # fasttext, sim_svd, trunc, word2vec
    dim=300,
    tokentype='WordLevel',  # WordLevel or BPE
vocab_size=5000,
limit_alphabet=100,
min_frequency=5,
supervision='unsupervised',
label='none',
)
os.environ["OMP_NUM_THREADS"] = "4" # export OMP_NUM_THREADS=4
os.environ["OPENBLAS_NUM_THREADS"] = "4" # export OPENBLAS_NUM_THREADS=4
os.environ["MKL_NUM_THREADS"] = "6" # export MKL_NUM_THREADS=6
os.environ["VECLIB_MAXIMUM_THREADS"] = "4" # export VECLIB_MAXIMUM_THREADS=4
os.environ["NUMEXPR_NUM_THREADS"] = "6" # export NUMEXPR_NUM_THREADS=6
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["WANDB_MODE"] = "offline" # switch to "online" to use wandb cloud sync
run = wandb.init(config=defaults, project='data efficiency')
base1 = os.path.join(wandb.run.dir, 'lan1')
base2 = os.path.join(wandb.run.dir, 'lan2')
os.makedirs(base1)
os.makedirs(base2)
cfg = wandb.config
def make_sized(lan, sizemb, pout, skipmb=0):
corpus = data.get_data(lan)
text = corpus.headmb(lan, skipmb+sizemb)
with open(pout, 'wt', encoding='utf-8') as fout:
fout.write(text[int(skipmb*1e6):])
p1 = os.path.join(base1, 'c.txt')
p2 = os.path.join(base2, 'c.txt')
make_sized(cfg.lan1, cfg.size1, p1)
size2 = cfg.size1 if cfg.symmetric == 1 else cfg.size2
skip2 = cfg.size1 if cfg.lan1 == cfg.lan2 else 0
make_sized(cfg.lan2, size2, p2, skipmb=skip2)
d1 = data.Corpus(p1, base1,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
d2 = data.Corpus(p2, base2,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
def get_evaldict():
lan1s, lan2s = cfg.eval.split('-')
    muse = data.MUSEEval()
dictpath = os.path.join(wandb.run.dir, 'eval_id.dict')
with open(dictpath, 'wt', encoding='utf-8', errors='surrogateescape') as f:
v1 = d1.tokenizer.get_vocab()
v2 = d2.tokenizer.get_vocab()
intersection = set(v1.keys()).intersection(v2.keys())
print('vocab has overlap of length', len(intersection))
for w in intersection:
f.write(f'{w}\t{w}\n')
# dictid = dictpath
if lan1s != lan2s:
dictpath = os.path.join(wandb.run.dir, 'eval_dict.dict')
        lanpath = muse.eval_path(f'{lan1s}-{lan2s}', type='full')
shutil.copyfile(lanpath, dictpath)
return dictpath
dictpath = get_evaldict()
@dataclass
class SearchArgs:
    stochastic_interval: int = 5
    stochastic_initial: float = 1
    stochastic_multiplier: float = 2
    threshold: float = 1e-4
    maxiter: int = 100
    eta: float = 1
    method: str = 'orthogonal'  # 'orthogonal' or 'lstsq'
    match: str = 'vecmap'
    csls: bool = True
dumpdir = os.path.join(wandb.run.dir, 'dump')
os.makedirs(dumpdir, exist_ok=True)
def evalf(sim):
# simf = match.most_diff_match(sim, k=10)
f, stats = evaluation.report_sim(sim, d1.tokenizer, d2.tokenizer, dictpath)
print(stats)
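# Supervised seed: known dictionary pairs get similarity 1, all other entries a
# small random value (tiebreak) so the matcher starts without exact ties.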
def dict_init_binary(tiebreak=1e-3):
inds = evaluation.dict_to_inds(dictpath, d1.tokenizer, d2.tokenizer, full=False)
sim = tiebreak * np.random.rand(d1.Co.shape[0], d2.Co.shape[0])
for i in range(len(inds[0])):
sim[inds[0][i], inds[1][i]] = 1
sim[0, 0] = 1
return sim
rows = []
def experiment(drop=20, dim=300, r1=99, r2=99):
def record(type, sim):
print(type)
simd = match.most_diff_match(sim, 10)
df, stats = evaluation.report_sim(simd, d1.tokenizer, d2.tokenizer, dictpath)
info = stats
info.update({'id': run.id, 'drop': drop, 'dim_p': dim, 'method_type': type})
for k, v in cfg.items():
            if k in info: print(f'Warning: {k} already exists')
info[k] = v
rows.append(info)
wandb.log({'table': wandb.Table(dataframe=pd.DataFrame.from_records(rows))})
wandb.log({'basicinfo': info})
print(info)
df.to_csv(os.path.join(dumpdir, f'{type}-{drop}-{dim}.csv'))
normproc = ['unit', 'center', 'unit']
normproc1 = ['unit']
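    # Element-wise sqrt of the co-occurrence counts followed by
    # unit/center/unit normalization; this preprocessing is shared by every
    # variant below.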
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=None, evalf=evalf)
record('coocmap', simscoocmap)
def clip_drop():
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
match.clip(X, r1=r1, r2=r2)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=None, evalf=evalf)
record('coocmap-clip', simscoocmap)
dropinit = simscoocmap
def f(Co):
X = np.sqrt(Co)
embeddings.normalize(X, normproc)
X = match.svd_power(X, beta=1, drop=drop, dim=None)
embeddings.normalize(X, normproc)
match.clip(X, r1=r1, r2=r2)
return X
X, Z = f(d1.Co), f(d2.Co)
_, _, simscoocmap = match.coocmapt(X, Z, args, normproc=normproc1, sim_init=dropinit, evalf=evalf)
record('coocmap-drop', simscoocmap)
    # clip_drop()  # optionally also run the clip and drop variants
experiment(drop=20, dim=300)
|
coocmap-main
|
experiments/test_coocmap.py
|
import os
# Thread caps must be set before numpy/BLAS are imported to take effect.
os.environ['WANDB_IGNORE_GLOBS'] = 'lan1/*,lan2/*'
os.environ["OMP_NUM_THREADS"] = "4"
os.environ["OPENBLAS_NUM_THREADS"] = "4"
os.environ["MKL_NUM_THREADS"] = "6"
os.environ["VECLIB_MAXIMUM_THREADS"] = "4"
os.environ["NUMEXPR_NUM_THREADS"] = "6"
from dataclasses import dataclass
import shutil
import wandb
import pandas as pd
import numpy as np
import data
import match
import evaluation
import embeddings
# from baselines import VecMap
defaults = dict(
# data
lan1='enwikishuf',
lan2='eswikishuf',
eval='en-es',
size1=50,
# size2=20, skip2=10,
symmetric=1,
width=5,
    # vectorization: fasttext, sim_svd, count, trunc
    vectorize='fasttext',
    dim=300,
    # tokenizer: WordLevel or BPE
    tokentype='WordLevel',
vocab_size=5000,
limit_alphabet=100,
min_frequency=5,
# experiment
supervision='unsupervised',
label='iter100',
)
run = wandb.init(config=defaults, project='data efficiency')
base1 = os.path.join(wandb.run.dir, 'lan1')
base2 = os.path.join(wandb.run.dir, 'lan2')
os.makedirs(base1)
os.makedirs(base2)
cfg = wandb.config
def make_sized(lan, sizemb, pout, skipmb=0):
corpus = data.get_data(lan)
text = corpus.headmb(lan, skipmb+sizemb)
with open(pout, 'wt', encoding='utf-8') as fout:
fout.write(text[int(skipmb*1e6):])
p1 = os.path.join(base1, 'c.txt')
p2 = os.path.join(base2, 'c.txt')
make_sized(cfg.lan1, cfg.size1, p1)
size2 = cfg.size1 if cfg.symmetric == 1 else cfg.size2
skip2 = cfg.size1 if cfg.lan1 == cfg.lan2 else 0
make_sized(cfg.lan2, size2, p2, skipmb=skip2)
d1 = data.Corpus(p1, base1,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
d2 = data.Corpus(p2, base2,
tokentype=cfg.tokentype, vocab_size=cfg.vocab_size, limit_alphabet=cfg.limit_alphabet, min_frequency=cfg.min_frequency,
vectorize=cfg.vectorize, width=cfg.width, dim=cfg.dim)
def get_evaldict():
lan1s, lan2s = cfg.eval.split('-')
    muse = data.MUSEEval()
dictpath = os.path.join(wandb.run.dir, 'eval_id.dict')
with open(dictpath, 'wt', encoding='utf-8', errors='surrogateescape') as f:
v1 = d1.tokenizer.get_vocab()
v2 = d2.tokenizer.get_vocab()
intersection = set(v1.keys()).intersection(v2.keys())
print('vocab has overlap of length', len(intersection))
for w in intersection:
f.write(f'{w}\t{w}\n')
# dictid = dictpath
if lan1s != lan2s:
dictpath = os.path.join(wandb.run.dir, 'eval_dict.dict')
        lanpath = muse.eval_path(f'{lan1s}-{lan2s}', type='full')
shutil.copyfile(lanpath, dictpath)
return dictpath
dictpath = get_evaldict()
@dataclass
class SearchArgs:
    stochastic_interval: int = 10
    threshold: float = 1e-4
    stochastic_initial: float = 1
    stochastic_multiplier: float = 2
    maxswaps: int = 100
    maxiter: int = 50
    eta: float = 1
    #
    method: str = 'orthogonal'  # 'orthogonal' or 'lstsq'
    match: str = 'vecmap'
    csls: bool = True
args = SearchArgs()
dumpdir = os.path.join(wandb.run.dir, 'dump')
os.makedirs(dumpdir, exist_ok=True)
def evalf(sim):
# simf = match.most_diff_match(sim, k=3)
f, stats = evaluation.report_sim(sim, d1.tokenizer, d2.tokenizer, dictpath)
print(stats)
def dict_init_binary():
inds = evaluation.dict_to_inds(dictpath, d1.tokenizer, d2.tokenizer, full=False)
sim = np.zeros((d1.Co.shape[0], d2.Co.shape[0]))
for i in range(len(inds[0])):
sim[inds[0][i], inds[1][i]] = 1
return sim
rows = []
def record(type, sim):
print('recording', type)
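    # most_diff_match (semantics assumed): rerank sim by the margin between the
    # best and the 10th-best candidate before evaluating.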
simd = match.most_diff_match(sim, 10)
df, stats = evaluation.report_sim(simd, d1.tokenizer, d2.tokenizer, dictpath)
info = stats
info.update({'id': run.id, 'method_type': type})
for k, v in cfg.items():
        if k in info: print(f'Warning: {k} already exists')
info[k] = v
rows.append(info)
wandb.log({'table': wandb.Table(dataframe=pd.DataFrame.from_records(rows))})
wandb.log({'basicinfo': info})
print(info)
df.to_csv(os.path.join(dumpdir, f'{type}.csv'))
def experiment(dim):
namemap = {'vecmap': 'bidir', 'coocmap': 'greedy'}
name = namemap[args.match]
    label = f'-{name}-csls' if args.csls else f'-{name}'
print('original dim', d1.Co.shape)
d1ft = d1.vecs[cfg.vectorize]
d2ft = d2.vecs[cfg.vectorize]
normproc = ['unit', 'center', 'unit']
embeddings.normalize(d1ft, normproc)
embeddings.normalize(d2ft, normproc)
_, _, sim = match.vecmap(d1ft, d2ft, args, evalf=evalf)
record('vecmap-fasttext' + label, sim)
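    # sqrt_sim returns U S U^T from the SVD x = U S V^T; squaring it gives
    # U S^2 U^T = x x^T, so this is the symmetric square root of x x^T.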
def sqrt_sim(x):
u, s, vt = np.linalg.svd(x, full_matrices=False)
return (u*s).dot(u.T)
A1f = sqrt_sim(d1ft)
A2f = sqrt_sim(d2ft)
normproc = ['unit', 'center', 'unit']
embeddings.normalize(A1f, normproc)
embeddings.normalize(A2f, normproc)
normproc1 = ['unit']
_, _, simscoocmap = match.coocmapt(A1f, A2f, args, normproc=normproc1, sim_init=None, evalf=evalf)
record(f'coocmap-{cfg.vectorize}-sqrt' + label, simscoocmap)
A1 = np.sqrt(d1.Co)
A2 = np.sqrt(d2.Co)
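    # sim_vecs (semantics assumed): factor the sqrt-count matrix into
    # dim-dimensional embedding vectors (alpha=1 keeps full singular-value
    # weighting) so plain vecmap can be run on raw counts.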
dn1 = match.sim_vecs(A1, dim, alpha=1)
dn2 = match.sim_vecs(A2, dim, alpha=1)
# dn1 = np.array(d1.vec)
# dn2 = np.array(d2.vec)
embeddings.normalize(dn1, normproc)
embeddings.normalize(dn2, normproc)
_, _, sim = match.vecmap(dn1, dn2, args, evalf=evalf)
record('vecmap-raw' + label, sim)
###### coocmap ######
embeddings.normalize(A1, normproc)
embeddings.normalize(A2, normproc)
normproc1 = ['unit']
_, _, simscoocmap = match.coocmapt(A1, A2, args, normproc=normproc1, sim_init=None, evalf=evalf)
record('coocmap' + label, simscoocmap)
# what if I get the correspondence analysis vectors here??
# sim0 = match.match_sim(A1, A2, sort=True, metric='cosine', norm_proc=['unit', 'center', 'unit'])
normproc1 = ['unit']
dictinit = dict_init_binary()
_, _, simdictinit = match.coocmapt(A1, A2, args, normproc=normproc1, sim_init=dictinit, evalf=evalf)
record('dict-init' + label, simdictinit)
args.match = 'vecmap'
args.csls = True
experiment(cfg.dim)
args.csls = False
experiment(cfg.dim)
# args.match = 'coocmap'
# args.csls = True
# experiment(cfg.dim)
# args.csls = False
# experiment(cfg.dim)
|
coocmap-main
|
experiments/test_matching.py
|