reid from https://github.com/michuanhaohao/reid-strong-baseline
zhangmeng
2020-01-17 1bb4d137919cae4f57f95a2572ee612dcabb3b3d
#pragma once
 
#include <torch/csrc/python_headers.h>
#include <memory>
#include <ATen/ATen.h>
 
#include <torch/csrc/autograd/variable.h>
#include <torch/csrc/THP_export.h>
 
// Python object that backs torch.autograd.Variable
// NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init)
struct THPVariable {
    PyObject_HEAD
    // Payload
    torch::autograd::Variable cdata;
    // Hooks to be run on backwards pass (corresponds to Python attr
    // '_backward_hooks', set by 'register_hook')
    PyObject* backward_hooks = nullptr;
};
 
// Python class object used for Variable instances; may be null before the
// extension has been initialized.
THP_API PyObject *THPVariableClass;

// Registers the Variable type with the given extension module.
bool THPVariable_initModule(PyObject *module);
// Wraps the given Variable in a Python object (returns a new reference).
THP_API PyObject * THPVariable_Wrap(torch::autograd::Variable var);
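For context, a hedged sketch of how these declarations are typically used from an extension module's initialization function. Only THPVariable_initModule is taken from this header; the module name, PyModuleDef, and error handling are assumptions made for the example.

// Hypothetical module-init function (illustrative only).
PyMODINIT_FUNC PyInit__example(void) {
  static PyModuleDef def = {
      PyModuleDef_HEAD_INIT, "_example", nullptr, /*m_size=*/-1, nullptr};
  PyObject* module = PyModule_Create(&def);
  if (!module) return nullptr;
  // Registers the Variable type with the freshly created module.
  if (!THPVariable_initModule(module)) {
    Py_DECREF(module);
    return nullptr;
  }
  return module;
}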
 
// True only if obj is exactly a THPVariable (not an instance of a subclass).
static inline bool THPVariable_CheckExact(PyObject *obj) {
  return Py_TYPE(obj) == (PyTypeObject*)THPVariableClass;
}

// True if obj is a THPVariable or an instance of a subclass.
inline bool THPVariable_Check(PyObject *obj)
{
  return THPVariableClass && PyObject_IsInstance(obj, THPVariableClass);
}
 
// Returns a reference to the Variable stored inside obj; the caller is
// responsible for first verifying THPVariable_Check(obj).
inline torch::autograd::Variable& THPVariable_Unpack(PyObject* obj) {
  auto var = (THPVariable*)obj;
  return var->cdata;
}
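Similarly, a minimal sketch of how the three helpers above might be used together inside an extension function: check the incoming object, borrow the underlying Variable, and hand back a fresh Python wrapper. The function name and signature are hypothetical; only THPVariable_Check, THPVariable_Unpack, and THPVariable_Wrap come from this header.

// Hypothetical extension function (illustrative only): accepts a Python
// Variable, rejects anything else, and returns a new Python object that
// wraps the same underlying Variable.
static PyObject* example_identity(PyObject* /*self*/, PyObject* arg) {
  if (!THPVariable_Check(arg)) {
    PyErr_SetString(PyExc_TypeError, "expected a torch.autograd.Variable");
    return nullptr;
  }
  // Borrow a reference to the C++ payload stored inside the THPVariable.
  torch::autograd::Variable& var = THPVariable_Unpack(arg);
  // THPVariable_Wrap takes the Variable by value, so this copies the handle
  // (not the tensor data) and returns a new Python reference.
  return THPVariable_Wrap(var);
}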