Support default value in schema
lewiszlw committed Mar 18, 2024
1 parent 0bad65e commit 1f13399
Showing 9 changed files with 206 additions and 19 deletions.
1 change: 1 addition & 0 deletions bustubx/src/catalog/catalog.rs
@@ -184,6 +184,7 @@ impl Catalog {
col.name.clone().into(),
format!("{sql_type}").into(),
col.nullable.into(),
format!("{}", col.default).into(),
],
);
columns_table
4 changes: 3 additions & 1 deletion bustubx/src/catalog/column.rs
@@ -2,7 +2,7 @@ use derive_with::With;
use std::sync::Arc;

use crate::catalog::DataType;
-use crate::common::TableReference;
use crate::common::{ScalarValue, TableReference};

pub type ColumnRef = Arc<Column>;

@@ -12,6 +12,7 @@ pub struct Column {
pub name: String,
pub data_type: DataType,
pub nullable: bool,
pub default: ScalarValue,
}

impl PartialEq for Column {
@@ -29,6 +30,7 @@ impl Column {
name: name.into(),
data_type,
nullable,
default: ScalarValue::new_empty(data_type),
}
}
}
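
Not part of the commit: a minimal usage sketch of the new default field, assuming the derive_with::With derive on Column generates the with_default builder used elsewhere in this diff, and that Column, DataType and ScalarValue are importable at these (assumed) paths.

use bustubx::catalog::{Column, DataType};
use bustubx::common::ScalarValue;

fn default_column_sketch() -> Column {
    // A nullable INT32 column whose default is 1 instead of ScalarValue::new_empty.
    Column::new("b", DataType::Int32, true).with_default(ScalarValue::Int32(Some(1)))
}
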
9 changes: 7 additions & 2 deletions bustubx/src/catalog/information.rs
@@ -2,7 +2,6 @@ use crate::buffer::{AtomicPageId, PageId, INVALID_PAGE_ID};
use crate::catalog::catalog::{CatalogSchema, CatalogTable};
use crate::catalog::{Catalog, Column, DataType, Schema, SchemaRef, DEFAULT_SCHEMA_NAME};
use crate::common::{ScalarValue, TableReference};
-use crate::storage::codec::TablePageCodec;
use crate::storage::TableHeap;
use crate::{BustubxError, BustubxResult, Database};

@@ -35,6 +34,7 @@ lazy_static::lazy_static! {
Column::new("column_name", DataType::Varchar(None), false),
Column::new("data_type", DataType::Varchar(None), false),
Column::new("nullable", DataType::Boolean, false),
Column::new("default", DataType::Varchar(None), false),
]));

pub static ref INDEXES_SCHMEA: SchemaRef = Arc::new(Schema::new(vec![
@@ -207,8 +207,13 @@ fn load_user_tables(db: &mut Database) -> BustubxResult<()> {
let ScalarValue::Boolean(Some(nullable)) = column_tuple.value(5)? else {
return error;
};
let ScalarValue::Varchar(Some(default)) = column_tuple.value(6)? else {
return error;
};
let data_type: DataType = data_type_str.as_str().try_into()?;
-columns.push(Column::new(column_name.clone(), data_type, *nullable));
let default = ScalarValue::from_string(default, data_type)?;
columns
.push(Column::new(column_name.clone(), data_type, *nullable).with_default(default));
}
let schema = Arc::new(Schema::new(columns));

131 changes: 131 additions & 0 deletions bustubx/src/common/scalar.rs
@@ -189,6 +189,137 @@ impl ScalarValue {
pub fn wrapping_sub(&self, _other: Self) -> BustubxResult<Self> {
todo!()
}

pub fn from_string(string: &String, data_type: DataType) -> BustubxResult<Self> {
let is_null = string.eq_ignore_ascii_case("null");
match data_type {
DataType::Boolean => {
let v = if is_null {
None
} else {
let v = string
.parse::<bool>()
.map_err(|_| BustubxError::Internal("Parse bool failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::Boolean(v))
}
DataType::Int8 => {
let v = if is_null {
None
} else {
let v = string
.parse::<i8>()
.map_err(|_| BustubxError::Internal("Parse i8 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::Int8(v))
}
DataType::Int16 => {
let v = if is_null {
None
} else {
let v = string
.parse::<i16>()
.map_err(|_| BustubxError::Internal("Parse i16 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::Int16(v))
}
DataType::Int32 => {
let v = if is_null {
None
} else {
let v = string
.parse::<i32>()
.map_err(|_| BustubxError::Internal("Parse i32 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::Int32(v))
}
DataType::Int64 => {
let v = if is_null {
None
} else {
let v = string
.parse::<i64>()
.map_err(|_| BustubxError::Internal("Parse i64 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::Int64(v))
}
DataType::UInt8 => {
let v = if is_null {
None
} else {
let v = string
.parse::<u8>()
.map_err(|_| BustubxError::Internal("Parse u8 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::UInt8(v))
}
DataType::UInt16 => {
let v = if is_null {
None
} else {
let v = string
.parse::<u16>()
.map_err(|_| BustubxError::Internal("Parse u16 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::UInt16(v))
}
DataType::UInt32 => {
let v = if is_null {
None
} else {
let v = string
.parse::<u32>()
.map_err(|_| BustubxError::Internal("Parse u32 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::UInt32(v))
}
DataType::UInt64 => {
let v = if is_null {
None
} else {
let v = string
.parse::<u64>()
.map_err(|_| BustubxError::Internal("Parse u64 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::UInt64(v))
}
DataType::Float32 => {
let v = if is_null {
None
} else {
let v = string
.parse::<f32>()
.map_err(|_| BustubxError::Internal("Parse f32 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::Float32(v))
}
DataType::Float64 => {
let v = if is_null {
None
} else {
let v = string
.parse::<f64>()
.map_err(|_| BustubxError::Internal("Parse f64 failed".to_string()))?;
Some(v)
};
Ok(ScalarValue::Float64(v))
}
DataType::Varchar(_) => {
let v = if is_null { None } else { Some(string.clone()) };
Ok(ScalarValue::Varchar(v))
}
}
}
}

impl PartialEq for ScalarValue {
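
Not part of the commit: a rough sketch of how from_string pairs with the textual defaults stored in the columns catalog table ("null" is matched case-insensitively, anything else is parsed as the target type), assuming it runs inside the bustubx crate with the same imports as this file.

fn from_string_sketch() -> BustubxResult<()> {
    let v = ScalarValue::from_string(&"42".to_string(), DataType::Int32)?;
    assert_eq!(v, ScalarValue::Int32(Some(42)));

    // "NULL" in any case maps to the None variant of the requested type.
    let n = ScalarValue::from_string(&"NULL".to_string(), DataType::Boolean)?;
    assert_eq!(n, ScalarValue::Boolean(None));

    // Unparsable input surfaces as BustubxError::Internal.
    assert!(ScalarValue::from_string(&"abc".to_string(), DataType::Int32).is_err());
    Ok(())
}
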
19 changes: 14 additions & 5 deletions bustubx/src/execution/physical_plan/insert.rs
@@ -67,14 +67,23 @@ impl VolcanoExecutor for PhysicalInsert {
let mut casted_data = vec![];
for (idx, value) in tuple.data.iter().enumerate() {
let target_type = self.projected_schema.column_with_index(idx)?.data_type;
-if target_type == value.data_type() {
-casted_data.push(value.clone());
casted_data.push(value.cast_to(&target_type)?);
}

// fill default values
let mut full_data = vec![];
for col in self.table_schema.columns.iter() {
if let Ok(idx) = self
.projected_schema
.index_of(col.relation.as_ref(), &col.name)
{
full_data.push(casted_data[idx].clone());
} else {
-casted_data.push(value.cast_to(&target_type)?);
full_data.push(col.default.clone())
}
}
-// TODO fill default values in data
-let tuple = Tuple::new(self.table_schema.clone(), casted_data);

let tuple = Tuple::new(self.table_schema.clone(), full_data);

let table_heap = context.catalog.table_heap(&self.table)?;
let rid = table_heap.insert_tuple(&EMPTY_TUPLE_META, &tuple)?;
39 changes: 32 additions & 7 deletions bustubx/src/planner/logical_planner/plan_create_table.rs
@@ -1,7 +1,9 @@
use crate::{BustubxError, BustubxResult};
use std::collections::HashSet;

-use crate::catalog::Column;
use crate::catalog::{Column, DataType};
use crate::common::ScalarValue;
use crate::expression::Expr;
use crate::planner::logical_plan::{CreateTable, LogicalPlan};

use super::LogicalPlanner;
@@ -15,17 +17,40 @@ impl<'a> LogicalPlanner<'a> {
let name = self.bind_table_name(name)?;
let mut columns = vec![];
for col_def in column_defs {
let data_type: DataType = (&col_def.data_type).try_into()?;
let not_null: bool = col_def
.options
.iter()
.any(|opt| matches!(opt.option, sqlparser::ast::ColumnOption::NotNull));
let default_expr: Option<&sqlparser::ast::Expr> = col_def
.options
.iter()
.find(|opt| matches!(opt.option, sqlparser::ast::ColumnOption::Default(_)))
.map(|opt| {
if let sqlparser::ast::ColumnOption::Default(expr) = &opt.option {
expr
} else {
unreachable!()
}
});
let default = if let Some(expr) = default_expr {
let expr = self.bind_expr(expr)?;
match expr {
Expr::Literal(lit) => lit.value.cast_to(&data_type)?,
_ => {
return Err(BustubxError::Internal(
"The expr is not literal".to_string(),
))
}
}
} else {
ScalarValue::new_empty(data_type)
};

columns.push(
-Column::new(
-col_def.name.value.clone(),
-(&col_def.data_type).try_into()?,
-!not_null,
-)
-.with_relation(Some(name.clone())),
Column::new(col_def.name.value.clone(), data_type, !not_null)
.with_relation(Some(name.clone()))
.with_default(default),
)
}

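
Not part of the commit: the rule the planner change above implements, restated as a standalone helper. Names, error text and types follow the diff, but the helper itself is hypothetical. Only literal DEFAULT expressions are accepted, and they are cast to the declared column type.

// Assumed to sit in the planner module, with Expr, DataType, ScalarValue,
// BustubxError and BustubxResult imported as in the file above.
fn bind_column_default(bound: Option<Expr>, data_type: DataType) -> BustubxResult<ScalarValue> {
    match bound {
        // "b integer default 1" binds a literal, which is cast to the column type.
        Some(Expr::Literal(lit)) => lit.value.cast_to(&data_type),
        // Any non-literal default expression is rejected.
        Some(_) => Err(BustubxError::Internal("The expr is not literal".to_string())),
        // No DEFAULT clause: fall back to the type's empty (null) value.
        None => Ok(ScalarValue::new_empty(data_type)),
    }
}

With this rule, the new sqllogictest below (create table t2 with b integer default 1) plans b's default, presumably as ScalarValue::Int32(Some(1)), which the insert executor then fills in for rows that omit b.
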
1 change: 0 additions & 1 deletion bustubx/src/storage/page/table_page.rs
@@ -43,7 +43,6 @@ pub struct TablePage {
pub data: [u8; BUSTUBX_PAGE_SIZE],
}

-// TODO do we need pre_page_id?
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TablePageHeader {
pub next_page_id: PageId,
19 changes: 17 additions & 2 deletions tests/sqllogictest/slt/insert.slt
@@ -17,8 +17,23 @@ insert into t1 values
(1, 2, 3, 4, 5, 6, 7, 8, 1.1, 'a'),
(NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)

-query IRT
query
select * from t1
----
1 2 3 4 5 6 7 8 1.1 a
-NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL


statement ok
create table t2 (
a tinyint,
b integer default 1,
)

statement ok
insert into t2(a) values (1)

query
select * from t2
----
1 1
2 changes: 1 addition & 1 deletion tests/sqllogictest/slt/update.slt
@@ -7,7 +7,7 @@ insert into t1 values (1, 2, 'x')
statement ok
update t1 set b = 3, c = 'xx' where a = 1;

-query II
query
select * from t1
----
1 3 xx
