diff --git a/gen/src/lib.rs b/gen/src/lib.rs index 472d1fc..a110fdc 100644 --- a/gen/src/lib.rs +++ b/gen/src/lib.rs @@ -231,7 +231,8 @@ impl SingleOpcodeDefinition { } fn extract_and_insert( - output: &mut FxHashMap>, + definitions: &mut FxHashMap>, + special_definitions: &mut FxHashMap, parser::OpcodeDefinition(pattern_seq, rules): parser::OpcodeDefinition, ) { let (mut named_rules, mut unnamed_rules) = gather_rules(rules); @@ -242,8 +243,18 @@ impl SingleOpcodeDefinition { named_rules = FxHashMap::default(); unnamed_rules = FxHashMap::default(); } - let mut possible_modifiers = FxHashSet::default(); let parser::OpcodeDecl(instruction, arguments) = opcode_decl; + if code_block.special { + if !instruction.modifiers.is_empty() || !arguments.0.is_empty() { + panic!( + "`{}`: no modifiers or arguments are allowed in parser definition.", + instruction.name + ); + } + special_definitions.insert(instruction.name, code_block.code); + continue; + } + let mut possible_modifiers = FxHashSet::default(); let mut unordered_modifiers = instruction .modifiers .into_iter() @@ -287,7 +298,7 @@ impl SingleOpcodeDefinition { arguments, code_block, }; - multihash_extend(output, current_opcode.clone(), entry); + multihash_extend(definitions, current_opcode.clone(), entry); last_opcode = current_opcode; } } @@ -350,10 +361,15 @@ fn gather_rules( pub fn derive_parser(tokens: proc_macro::TokenStream) -> proc_macro::TokenStream { let parse_definitions = parse_macro_input!(tokens as gen_impl::parser::ParseDefinitions); let mut definitions = FxHashMap::default(); + let mut special_definitions = FxHashMap::default(); let types = OpcodeDefinitions::get_enum_types(&parse_definitions.definitions); let enum_types_tokens = emit_enum_types(types, parse_definitions.additional_enums); for definition in parse_definitions.definitions.into_iter() { - SingleOpcodeDefinition::extract_and_insert(&mut definitions, definition); + SingleOpcodeDefinition::extract_and_insert( + &mut definitions, + &mut special_definitions, + definition, + ); } let definitions = definitions .into_iter() @@ -363,9 +379,12 @@ pub fn derive_parser(tokens: proc_macro::TokenStream) -> proc_macro::TokenStream }) .collect::>(); let mut token_enum = parse_definitions.token_type; - let (all_opcode, all_modifier) = - write_definitions_into_tokens(&definitions, &mut token_enum.variants); - let token_impl = emit_parse_function(&token_enum.ident, &definitions, all_opcode, all_modifier); + let (all_opcode, all_modifier) = write_definitions_into_tokens( + &definitions, + special_definitions.keys(), + &mut token_enum.variants, + ); + let token_impl = emit_parse_function(&token_enum.ident, &definitions, &special_definitions, all_opcode, all_modifier); let tokens = quote! { #enum_types_tokens @@ -422,6 +441,7 @@ fn emit_enum_types( fn emit_parse_function( type_name: &Ident, defs: &FxHashMap, + special_defs: &FxHashMap, all_opcode: Vec<&Ident>, all_modifier: FxHashSet<&parser::DotModifier>, ) -> TokenStream { @@ -433,7 +453,7 @@ fn emit_parse_function( let mut fn_name = opcode.to_string(); write!(&mut fn_name, "_{}", idx).ok(); let fn_name = Ident::new(&fn_name, Span::call_site()); - let code_block = &def.code_block.0; + let code_block = &def.code_block.code; let args = def.function_arguments_declarations(); quote! 
{ fn #fn_name<'input>(state: &mut PtxParserState, #(#args),* ) -> Instruction> #code_block @@ -494,7 +514,12 @@ fn emit_parse_function( } .to_tokens(&mut result); result - }); + }).chain(special_defs.iter().map(|(opcode, code)| { + let opcode_variant = Ident::new(&capitalize(&opcode.to_string()), opcode.span()); + quote! { + #opcode_variant => { #code? } + } + })); let opcodes = all_opcode.into_iter().map(|op_ident| { let op = op_ident.to_string(); let variant = Ident::new(&capitalize(&op), op_ident.span()); @@ -749,7 +774,7 @@ fn emit_definition_parser( }; let pre_pipe = if arg.pre_pipe { quote! { - any.verify(|t| *t == #token_type::Or).void() + any.verify(|t| *t == #token_type::Pipe).void() } } else { quote! { @@ -845,6 +870,7 @@ fn emit_definition_parser( fn write_definitions_into_tokens<'a>( defs: &'a FxHashMap, + special_definitions: impl Iterator, variants: &mut Punctuated, ) -> (Vec<&'a Ident>, FxHashSet<&'a parser::DotModifier>) { let mut all_opcodes = Vec::new(); @@ -864,6 +890,16 @@ fn write_definitions_into_tokens<'a>( } } } + for opcode in special_definitions { + all_opcodes.push(opcode); + let opcode_as_string = opcode.to_string(); + let variant_name = Ident::new(&capitalize(&opcode_as_string), opcode.span()); + let arg: Variant = syn::parse_quote! { + #[token(#opcode_as_string)] + #variant_name + }; + variants.push(arg); + } for modifier in all_modifiers.iter() { let modifier_as_string = modifier.to_string(); let variant_name = modifier.dot_capitalized(); diff --git a/gen_impl/src/lib.rs b/gen_impl/src/lib.rs index 57660fb..39cc30e 100644 --- a/gen_impl/src/lib.rs +++ b/gen_impl/src/lib.rs @@ -1,8 +1,8 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote, ToTokens}; use syn::{ - braced, parse::Parse, punctuated::Punctuated, token, Expr, Ident, Token, Type, TypeParam, - Visibility, + braced, parse::Parse, punctuated::Punctuated, token, Expr, Ident, PathSegment, Token, Type, + TypeParam, Visibility, }; pub mod parser; @@ -18,7 +18,7 @@ pub struct GenerateInstructionType { impl GenerateInstructionType { pub fn emit_arg_types(&self, tokens: &mut TokenStream) { for v in self.variants.iter() { - v.emit_type(&self.visibility, &self.type_parameters, tokens); + v.emit_type(&self.visibility, tokens); } } @@ -165,7 +165,7 @@ impl Parse for GenerateInstructionType { pub struct InstructionVariant { pub name: Ident, - pub type_: Option, + pub type_: Option>, pub space: Option, pub data: Option, pub arguments: Option, @@ -225,7 +225,7 @@ impl InstructionVariant { &self, enum_: &Ident, tokens: &mut TokenStream, - mut fn_: impl FnMut(&InstructionArguments, &Option, &Option) -> TokenStream, + mut fn_: impl FnMut(&InstructionArguments, &Option>, &Option) -> TokenStream, ) { let name = &self.name; let arguments = match &self.arguments { @@ -238,9 +238,10 @@ impl InstructionVariant { } Some(args) => args, }; + let data = &self.data.as_ref().map(|_| quote! { data,}); let arg_calls = fn_(arguments, &self.type_, &self.space); quote! { - #enum_ :: #name { arguments, data } => { + #enum_ :: #name { #data arguments } => { #arg_calls } } @@ -269,19 +270,14 @@ impl InstructionVariant { .to_tokens(tokens); } - fn emit_type( - &self, - vis: &Option, - type_parameters: &Punctuated, - tokens: &mut TokenStream, - ) { + fn emit_type(&self, vis: &Option, tokens: &mut TokenStream) { let arguments = match self.arguments { Some(ref a) => a, None => return, }; let name = self.args_name(); let type_parameters = if arguments.generic.is_some() { - Some(quote! { <#type_parameters> }) + Some(quote! 
{ }) } else { None }; @@ -324,7 +320,7 @@ impl Parse for InstructionVariant { } enum VariantProperty { - Type(Expr), + Type(Option), Space(Expr), Data(Type), Arguments(InstructionArguments), @@ -336,7 +332,12 @@ impl VariantProperty { Ok(if lookahead.peek(Token![type]) { input.parse::()?; input.parse::()?; - VariantProperty::Type(input.parse::()?) + VariantProperty::Type(if input.peek(Token![!]) { + input.parse::()?; + None + } else { + Some(input.parse::()?) + }) } else if lookahead.peek(Ident) { let key = input.parse::()?; match &*key.to_string() { @@ -352,7 +353,7 @@ impl VariantProperty { let generics = if input.peek(Token![<]) { input.parse::()?; let gen_params = - Punctuated::::parse_separated_nonempty(input)?; + Punctuated::::parse_separated_nonempty(input)?; input.parse::]>()?; Some(gen_params) } else { @@ -380,13 +381,13 @@ impl VariantProperty { } pub struct InstructionArguments { - pub generic: Option>, + pub generic: Option>, pub fields: Punctuated, } impl InstructionArguments { pub fn parse( - generic: Option>, + generic: Option>, input: syn::parse::ParseStream, ) -> syn::Result { let fields = Punctuated::::parse_terminated_with( @@ -396,13 +397,17 @@ impl InstructionArguments { Ok(Self { generic, fields }) } - fn emit_visit(&self, parent_type: &Option, parent_space: &Option) -> TokenStream { + fn emit_visit( + &self, + parent_type: &Option>, + parent_space: &Option, + ) -> TokenStream { self.emit_visit_impl(parent_type, parent_space, ArgumentField::emit_visit) } fn emit_visit_mut( &self, - parent_type: &Option, + parent_type: &Option>, parent_space: &Option, ) -> TokenStream { self.emit_visit_impl(parent_type, parent_space, ArgumentField::emit_visit_mut) @@ -410,7 +415,7 @@ impl InstructionArguments { fn emit_visit_map( &self, - parent_type: &Option, + parent_type: &Option>, parent_space: &Option, ) -> TokenStream { self.emit_visit_impl(parent_type, parent_space, ArgumentField::emit_visit_map) @@ -418,14 +423,19 @@ impl InstructionArguments { fn emit_visit_impl( &self, - parent_type: &Option, + parent_type: &Option>, parent_space: &Option, - mut fn_: impl FnMut(&ArgumentField, &Option, &Option) -> TokenStream, + mut fn_: impl FnMut(&ArgumentField, &Option>, &Option, bool) -> TokenStream, ) -> TokenStream { + let is_ident = if let Some(ref generic) = self.generic { + generic.len() > 1 + } else { + false + }; let field_calls = self .fields .iter() - .map(|f| fn_(f, parent_type, parent_space)); + .map(|f| fn_(f, parent_type, parent_space, is_ident)); quote! 
{ #(#field_calls)* } @@ -487,25 +497,37 @@ impl ArgumentField { input.parse::() } - fn emit_visit(&self, parent_type: &Option, parent_space: &Option) -> TokenStream { - self.emit_visit_impl(parent_type, parent_space, false) + fn emit_visit( + &self, + parent_type: &Option>, + parent_space: &Option, + is_ident: bool, + ) -> TokenStream { + self.emit_visit_impl(parent_type, parent_space, is_ident, false) } fn emit_visit_mut( &self, - parent_type: &Option, + parent_type: &Option>, parent_space: &Option, + is_ident: bool, ) -> TokenStream { - self.emit_visit_impl(parent_type, parent_space, true) + self.emit_visit_impl(parent_type, parent_space, is_ident, true) } fn emit_visit_impl( &self, - parent_type: &Option, + parent_type: &Option>, parent_space: &Option, + is_ident: bool, is_mut: bool, ) -> TokenStream { - let type_ = self.type_.as_ref().or(parent_type.as_ref()).unwrap(); + let (is_typeless, type_) = match (self.type_.as_ref(), parent_type) { + (Some(type_), _) => (false, Some(type_)), + (None, None) => panic!("No type set"), + (None, Some(None)) => (true, None), + (None, Some(Some(type_))) => (false, Some(type_)), + }; let space = self .space .as_ref() @@ -514,38 +536,72 @@ impl ArgumentField { .unwrap_or_else(|| quote! { StateSpace::Reg }); let is_dst = self.is_dst; let name = &self.name; - let (operand_fn, arguments_name) = if is_mut { - ( - quote! { - VisitOperand::visit_mut - }, - quote! { - &mut arguments.#name - }, - ) + let type_space = if is_typeless { + quote! { + let type_space = None; + } } else { - ( - quote! { - VisitOperand::visit - }, - quote! { - & arguments.#name - }, - ) + quote! { + let type_ = #type_; + let space = #space; + let type_space = Some((std::borrow::Borrow::::borrow(&type_), space)); + } }; - quote! {{ - let type_ = #type_; - let space = #space; - #operand_fn(#arguments_name, |x| visitor.visit(x, &type_, space, #is_dst)); - }} + if is_ident { + if is_mut { + quote! { + { + #type_space + visitor.visit_ident(&mut arguments.#name, type_space, #is_dst); + } + } + } else { + quote! { + { + #type_space + visitor.visit_ident(& arguments.#name, type_space, #is_dst); + } + } + } + } else { + let (operand_fn, arguments_name) = if is_mut { + ( + quote! { + VisitOperand::visit_mut + }, + quote! { + &mut arguments.#name + }, + ) + } else { + ( + quote! { + VisitOperand::visit + }, + quote! { + & arguments.#name + }, + ) + }; + quote! {{ + #type_space + #operand_fn(#arguments_name, |x| visitor.visit(x, type_space, #is_dst)); + }} + } } fn emit_visit_map( &self, - parent_type: &Option, + parent_type: &Option>, parent_space: &Option, + is_ident: bool, ) -> TokenStream { - let type_ = self.type_.as_ref().or(parent_type.as_ref()).unwrap(); + let (is_typeless, type_) = match (self.type_.as_ref(), parent_type) { + (Some(type_), _) => (false, Some(type_)), + (None, None) => panic!("No type set"), + (None, Some(None)) => (true, None), + (None, Some(Some(type_))) => (false, Some(type_)), + }; let space = self .space .as_ref() @@ -554,11 +610,30 @@ impl ArgumentField { .unwrap_or_else(|| quote! { StateSpace::Reg }); let is_dst = self.is_dst; let name = &self.name; - quote! { - let #name = { + let type_space = if is_typeless { + quote! { + let type_space = None; + } + } else { + quote! { let type_ = #type_; let space = #space; - MapOperand::map(arguments.#name, |x| visitor.visit(x, &type_, space, #is_dst)) + let type_space = Some((std::borrow::Borrow::::borrow(&type_), space)); + } + }; + let map_call = if is_ident { + quote! 
{ + visitor.visit_ident(arguments.#name, type_space, #is_dst) + } + } else { + quote! { + MapOperand::map(arguments.#name, |x| visitor.visit(x, type_space, #is_dst)) + } + }; + quote! { + let #name = { + #type_space + #map_call }; } } @@ -702,27 +777,6 @@ mod tests { assert!(matches!(src.space, None)); } - #[test] - fn visit_variant() { - let input = quote! { - Ld { - type: ScalarType::U32, - data: LdDetails, - arguments
: { - dst: { - repr: P::Operand, - type: ScalarType::U32 - }, - src: P::Operand, - }, - } - }; - let variant = syn::parse2::(input).unwrap(); - let mut output = TokenStream::new(); - variant.emit_visit(&Ident::new("Instruction", Span::call_site()), &mut output); - assert_eq!(output.to_string(), "Instruction :: Ld { arguments , data } => { { let type_ = ScalarType :: U32 ; let space = StateSpace :: Reg ; visitor . visit (& arguments . dst , & type_ , space , true) ; } { let type_ = ScalarType :: U32 ; let space = StateSpace :: Reg ; visitor . visit (& arguments . src , & type_ , space , false) ; } }"); - } - #[test] fn visit_variant_empty() { let input = quote! { diff --git a/gen_impl/src/parser.rs b/gen_impl/src/parser.rs index 519bf12..ea5070d 100644 --- a/gen_impl/src/parser.rs +++ b/gen_impl/src/parser.rs @@ -86,13 +86,27 @@ impl Parse for OpcodeDecl { } } -pub struct CodeBlock(pub proc_macro2::Group); +pub struct CodeBlock { + pub special: bool, + pub code: proc_macro2::Group, +} impl Parse for CodeBlock { fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::]>()?; - let group = input.parse::()?; - Ok(Self(group)) + let lookahead = input.lookahead1(); + let (special, code) = if lookahead.peek(Token![<]) { + input.parse::()?; + input.parse::()?; + //input.parse::]>()?; + (true, input.parse::()?) + } else if lookahead.peek(Token![=]) { + input.parse::()?; + input.parse::]>()?; + (false, input.parse::()?) + } else { + return Err(lookahead.error()); + }; + Ok(Self{special, code}) } } @@ -761,7 +775,7 @@ mod tests { .ss: StateSpace = { .global, .local, .param{::func}, .shared{::cta, ::cluster} } }; let rule = syn::parse2::(input).unwrap(); - assert_eq!(". ss", rule.modifier.tokens().to_string()); + assert_eq!(". ss", rule.modifier.unwrap().tokens().to_string()); assert_eq!( "StateSpace", rule.type_.unwrap().to_token_stream().to_string() @@ -791,7 +805,7 @@ mod tests { .cop: StCacheOperator = { .wb, .cg, .cs, .wt } }; let rule = syn::parse2::(input).unwrap(); - assert_eq!(". cop", rule.modifier.tokens().to_string()); + assert_eq!(". cop", rule.modifier.unwrap().tokens().to_string()); assert_eq!( "StCacheOperator", rule.type_.unwrap().to_token_stream().to_string() @@ -819,4 +833,12 @@ mod tests { assert!(!a.can_be_negated); assert!(a.unified); } + + #[test] + fn special_block() { + let input = quote! 
{ + bra <= { bra(stream) } + }; + syn::parse2::(input).unwrap(); + } } diff --git a/ptx_parser/src/ast.rs b/ptx_parser/src/ast.rs index e456e03..6f1a9e3 100644 --- a/ptx_parser/src/ast.rs +++ b/ptx_parser/src/ast.rs @@ -31,7 +31,7 @@ gen::generate_instruction_type!( } }, Add { - type: { data.type_().into() }, + type: { Type::from(data.type_()) }, data: ArithDetails, arguments: { dst: T, @@ -51,12 +51,12 @@ gen::generate_instruction_type!( } }, Mul { - type: { data.type_().into() }, + type: { Type::from(data.type_()) }, data: MulDetails, arguments: { dst: { repr: T, - type: { data.dst_type().into() }, + type: { Type::from(data.dst_type()) }, }, src1: T, src2: T, @@ -67,19 +67,19 @@ gen::generate_instruction_type!( arguments: { dst1: { repr: T, - type: ScalarType::Pred.into() + type: Type::from(ScalarType::Pred) }, dst2: { repr: Option, - type: ScalarType::Pred.into() + type: Type::from(ScalarType::Pred) }, src1: { repr: T, - type: data.type_.into(), + type: Type::from(data.type_), }, src2: { repr: T, - type: data.type_.into(), + type: Type::from(data.type_), } } }, @@ -88,26 +88,58 @@ gen::generate_instruction_type!( arguments: { dst1: { repr: T, - type: ScalarType::Pred.into() + type: Type::from(ScalarType::Pred) }, dst2: { repr: Option, - type: ScalarType::Pred.into() + type: Type::from(ScalarType::Pred) }, src1: { repr: T, - type: data.base.type_.into(), + type: Type::from(data.base.type_), }, src2: { repr: T, - type: data.base.type_.into(), + type: Type::from(data.base.type_), }, src3: { repr: T, - type: ScalarType::Pred.into() + type: Type::from(ScalarType::Pred) } } }, + Not { + data: ScalarType, + type: { Type::Scalar(data.clone()) }, + arguments: { + dst: T, + src: T, + } + }, + Or { + data: ScalarType, + type: { Type::Scalar(data.clone()) }, + arguments: { + dst: T, + src1: T, + src2: T, + } + }, + And { + data: ScalarType, + type: { Type::Scalar(data.clone()) }, + arguments: { + dst: T, + src1: T, + src2: T, + } + }, + Bra { + type: !, + arguments: { + src: T + } + }, Ret { data: RetData }, @@ -115,21 +147,26 @@ gen::generate_instruction_type!( } ); -pub trait Visitor { - fn visit(&mut self, args: &T, type_: &Type, space: StateSpace, is_dst: bool); +pub trait Visitor { + fn visit(&mut self, args: &T, type_space: Option<(&Type, StateSpace)>, is_dst: bool); + fn visit_ident(&self, args: &T::Ident, type_space: Option<(&Type, StateSpace)>, is_dst: bool); } -pub trait VisitorMut { - fn visit(&mut self, args: &mut T, type_: &Type, space: StateSpace, is_dst: bool); +pub trait VisitorMut { + fn visit(&mut self, args: &mut T, type_space: Option<(&Type, StateSpace)>, is_dst: bool); + fn visit_ident(&mut self, args: &mut T::Ident, type_space: Option<(&Type, StateSpace)>, is_dst: bool); } -pub trait VisitorMap { - fn visit(&mut self, args: From, type_: &Type, space: StateSpace, is_dst: bool) -> To; +pub trait VisitorMap { + fn visit(&mut self, args: From, type_space: Option<(&Type, StateSpace)>, is_dst: bool) -> To; + fn visit_ident(&mut self, args: From::Ident, type_space: Option<(&Type, StateSpace)>, is_dst: bool) -> To::Ident; } trait VisitOperand { - type Operand; + type Operand: Operand; + #[allow(unused)] // Used by generated code fn visit(&self, fn_: impl FnOnce(&Self::Operand)); + #[allow(unused)] // Used by generated code fn visit_mut(&mut self, fn_: impl FnOnce(&mut Self::Operand)); } @@ -156,6 +193,7 @@ impl VisitOperand for Option { trait MapOperand: Sized { type Input; type Output; + #[allow(unused)] // Used by generated code fn map(self, fn_: impl FnOnce(Self::Input) -> U) -> 
Self::Output; } @@ -289,12 +327,12 @@ pub enum ParsedOperand { VecPack(Vec), } -impl Operand for ParsedOperand { +impl Operand for ParsedOperand { type Ident = Ident; } pub trait Operand { - type Ident; + type Ident: Copy; } #[derive(Copy, Clone)] @@ -447,6 +485,7 @@ pub enum MulDetails { } impl MulDetails { + #[allow(unused)] // Used by generated code fn type_(&self) -> ScalarType { match self { MulDetails::Integer { type_, .. } => *type_, @@ -454,6 +493,7 @@ impl MulDetails { } } + #[allow(unused)] // Used by generated code fn dst_type(&self) -> ScalarType { match self { MulDetails::Integer { @@ -521,7 +561,7 @@ impl SetpData { pub struct SetpBoolData { pub base: SetpData, pub bool_op: SetpBoolPostOp, - pub negate_src3: bool + pub negate_src3: bool, } #[derive(PartialEq, Eq, Copy, Clone)] diff --git a/ptx_parser/src/main.rs b/ptx_parser/src/main.rs index 785496d..a6a2381 100644 --- a/ptx_parser/src/main.rs +++ b/ptx_parser/src/main.rs @@ -623,7 +623,7 @@ fn predicated_instruction<'a, 'input>( } fn pred_at<'a, 'input>(stream: &mut PtxParser<'a, 'input>) -> PResult> { - (Token::At, opt(Token::Not), ident) + (Token::At, opt(Token::Exclamation), ident) .map(|(_, not, label)| ast::PredAt { not: not.is_some(), label, @@ -888,6 +888,21 @@ impl<'input, I: Stream + StreamIsPartial, E: ParserError> Parse } } +fn bra<'a, 'input>( + stream: &mut PtxParser<'a, 'input>, +) -> PResult>> { + preceded( + opt(Token::DotUni), + any.verify_map(|t| match t { + Token::Ident(ident) => Some(ast::Instruction::Bra { + arguments: BraArgs { src: ident }, + }), + _ => None, + }), + ) + .parse_next(stream) +} + // Modifiers are turned into arguments to the blocks, with type: // * If it is an alternative: // * If it is mandatory then its type is Foo (as defined by the relevant rule) @@ -919,9 +934,9 @@ derive_parser!( #[regex(r#""[^"]*""#)] String, #[token("|")] - Or, + Pipe, #[token("!")] - Not, + Exclamation, #[token("(")] LParen, #[token(")")] @@ -1461,6 +1476,36 @@ derive_parser!( .f32, .f64, .f16, .f16x2, .bf16, .bf16x2 }; + // https://docs.nvidia.com/cuda/parallel-thread-execution/index.html#logic-and-shift-instructions-not + not.type d, a => { + ast::Instruction::Not { + data: type_, + arguments: NotArgs { dst: d, src: a } + } + } + .type: ScalarType = { .pred, .b16, .b32, .b64 }; + + // https://docs.nvidia.com/cuda/parallel-thread-execution/index.html#logic-and-shift-instructions-or + or.type d, a, b => { + ast::Instruction::Or { + data: type_, + arguments: OrArgs { dst: d, src1: a, src2: b } + } + } + .type: ScalarType = { .pred, .b16, .b32, .b64 }; + + // https://docs.nvidia.com/cuda/parallel-thread-execution/index.html#logic-and-shift-instructions-and + and.type d, a, b => { + ast::Instruction::And { + data: type_, + arguments: AndArgs { dst: d, src1: a, src2: b } + } + } + .type: ScalarType = { .pred, .b16, .b32, .b64 }; + + // https://docs.nvidia.com/cuda/parallel-thread-execution/index.html#control-flow-instructions-bra + bra <= { bra(stream) } + // https://docs.nvidia.com/cuda/parallel-thread-execution/index.html#control-flow-instructions-ret ret{.uni} => { Instruction::Ret { data: RetData { uniform: uni } }
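
For downstream code, the most visible change in this patch is the visitor API in ptx_parser/src/ast.rs: `visit` now receives `type_space: Option<(&Type, StateSpace)>` (expected to be `None` for typeless instructions such as `bra`, which is declared with `type: !`), and a separate `visit_ident` method handles bare-identifier arguments such as a branch target. Below is a minimal sketch of what an updated `VisitorMut` implementation might look like; the `<T: Operand>` bound and the exact trait generics are assumptions inferred from the `T::Ident` usage, since the generic parameters are not fully visible in this diff, and `DstCounter` is a hypothetical example type.

// Sketch only: assumes `Type`, `StateSpace`, `Operand`, and `VisitorMut`
// from ptx_parser::ast are in scope, and that the trait is declared as
// `VisitorMut<T: Operand>` (an assumption; adjust to the crate's actual bound).
struct DstCounter {
    dsts: usize,
}

impl<T: Operand> VisitorMut<T> for DstCounter {
    fn visit(&mut self, _args: &mut T, _type_space: Option<(&Type, StateSpace)>, is_dst: bool) {
        // Typed operands (e.g. add/mul sources and destinations) land here,
        // with their type and state space when the variant declares one.
        if is_dst {
            self.dsts += 1;
        }
    }

    fn visit_ident(
        &mut self,
        _args: &mut T::Ident,
        _type_space: Option<(&Type, StateSpace)>,
        is_dst: bool,
    ) {
        // Bare identifiers (e.g. the `bra` target) arrive here; for variants
        // declared with `type: !` the generated code passes `type_space = None`.
        if is_dst {
            self.dsts += 1;
        }
    }
}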