vector_deactivate
Regular
4.4: Absent ⚠️
4.8: Absent ⚠️
4.10: Absent ⚠️
4.13: Absent ⚠️

4.15: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

4.18: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.0: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.3: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.4: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.8: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.11: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.13: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.15: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

5.19: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

6.2: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

6.5: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

6.8: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);
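Note that vector_deactivate is generated from the same event class as vector_activate in the kernel's irq_vectors trace headers, which is why the raw struct and probe function above carry the vector_activate name. As a quick way to watch the event, the sketch below enables it through tracefs and streams the formatted records; it is a minimal example, assuming tracefs is mounted at /sys/kernel/tracing (older systems may use /sys/kernel/debug/tracing), an x86 kernel that exposes the event (4.15 or later per the table above), and root privileges.

/* Sketch: watch vector_deactivate via tracefs (paths are assumptions, see above). */
#include <stdio.h>
#include <stdlib.h>

#define TRACEFS "/sys/kernel/tracing"

/* Write a small string to a tracefs control file. */
static int write_file(const char *path, const char *val)
{
    FILE *f = fopen(path, "w");

    if (!f) {
        perror(path);
        return -1;
    }
    fputs(val, f);
    fclose(f);
    return 0;
}

int main(void)
{
    char line[4096];
    FILE *pipe;

    /* Enable the event; writing "0" to the same file disables it again. */
    if (write_file(TRACEFS "/events/irq_vectors/vector_deactivate/enable", "1"))
        return EXIT_FAILURE;

    /* trace_pipe blocks until records arrive, then prints each one using the
     * event's format string (irq, is_managed, can_reserve, reserve). */
    pipe = fopen(TRACEFS "/trace_pipe", "r");
    if (!pipe) {
        perror("trace_pipe");
        return EXIT_FAILURE;
    }
    while (fgets(line, sizeof(line), pipe))
        fputs(line, stdout);

    fclose(pipe);
    return EXIT_SUCCESS;
}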
arm64: Absent ⚠️
armhf: Absent ⚠️
ppc64el: Absent ⚠️
riscv64: Absent ⚠️
aws: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

azure: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

gcp: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);

lowlatency: ✅
Event:
struct trace_event_raw_vector_activate {
    struct trace_entry ent;
    unsigned int irq;
    bool is_managed;
    bool can_reserve;
    bool reserve;
    char __data[0];
};
Function:
void trace_event_raw_event_vector_activate(void *__data, unsigned int irq, bool is_managed, bool can_reserve, bool reserve);
Regular
4.15 and 4.18: ✅
4.18 and 5.0: ✅
5.0 and 5.3: ✅
5.3 and 5.4: ✅
5.4 and 5.8: ✅
5.8 and 5.11: ✅
5.11 and 5.13: ✅
5.13 and 5.15: ✅
5.15 and 5.19: ✅
5.19 and 6.2: ✅
6.2 and 6.5: ✅
6.5 and 6.8: ✅
generic and aws: ✅
generic and azure: ✅
generic and gcp: ✅
generic and lowlatency: ✅
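Because the layout is identical in every release and flavor where the event is present, a single user-space mirror of the record can be shared across all of them, for example when decoding raw samples captured with perf. The struct below is a sketch of that mirror: the trace_entry header fields shown (type, flags, preempt_count, pid) match the common layout on these kernels, but actual sizes and offsets should always be taken from events/irq_vectors/vector_deactivate/format rather than assumed.

/* Sketch of a user-space view of one vector_deactivate record.
 * Field offsets are an assumption; confirm them against the event's format file. */
#include <stdbool.h>
#include <stdint.h>

struct trace_entry_user {
    uint16_t type;          /* event type id */
    uint8_t  flags;         /* trace flags (irqs-off, need-resched, ...) */
    uint8_t  preempt_count;
    int32_t  pid;           /* pid of the task that hit the event */
};

struct vector_deactivate_record {
    struct trace_entry_user ent;
    uint32_t irq;           /* Linux irq number */
    bool     is_managed;
    bool     can_reserve;
    bool     reserve;
};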